diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/.coveragerc b/owl-bot-staging/google-cloud-contentwarehouse/v1/.coveragerc
new file mode 100644
index 000000000000..a1299b0a58fe
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/.coveragerc
@@ -0,0 +1,13 @@
+[run]
+branch = True
+
+[report]
+show_missing = True
+omit =
+    google/cloud/contentwarehouse/__init__.py
+    google/cloud/contentwarehouse/gapic_version.py
+exclude_lines =
+    # Re-enable the standard pragma
+    pragma: NO COVER
+    # Ignore debug-only repr
+    def __repr__
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/.flake8 b/owl-bot-staging/google-cloud-contentwarehouse/v1/.flake8
new file mode 100644
index 000000000000..29227d4cf419
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/.flake8
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[flake8]
+ignore = E203, E266, E501, W503
+exclude =
+  # Exclude generated code.
+  **/proto/**
+  **/gapic/**
+  **/services/**
+  **/types/**
+  *_pb2.py
+
+  # Standard linting exemptions.
+  **/.nox/**
+  __pycache__,
+  .git,
+  *.pyc,
+  conf.py
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/MANIFEST.in b/owl-bot-staging/google-cloud-contentwarehouse/v1/MANIFEST.in
new file mode 100644
index 000000000000..a86ac6eb964d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/MANIFEST.in
@@ -0,0 +1,2 @@
+recursive-include google/cloud/contentwarehouse *.py
+recursive-include google/cloud/contentwarehouse_v1 *.py
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/README.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/README.rst
new file mode 100644
index 000000000000..38ba2af35705
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/README.rst
@@ -0,0 +1,49 @@
+Python Client for Google Cloud Contentwarehouse API
+===================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Contentwarehouse API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/_static/custom.css
new file mode 100644
index 000000000000..06423be0b592
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/_static/custom.css
@@ -0,0 +1,3 @@
+dl.field-list > dt {
+    min-width: 100px
+}
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/conf.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/conf.py
new file mode 100644
index 000000000000..322be6ce8444
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# google-cloud-contentwarehouse documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "4.0.1"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-contentwarehouse"
+copyright = u"2023, Google, LLC"
+author = u"Google APIs"         # TODO: autogenerate this bit
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = 'en'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for Python",
+    "github_user": "googleapis",
+    "github_repo": "google-cloud-python",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-contentwarehouse-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+    # Temporarily suppress this to avoid "more than one target found for
+    # cross-reference" warning, which are intractable for us to avoid while in
+    # a mono-repo.
+    # See https://github.com/sphinx-doc/sphinx/blob
+    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+    "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    # 'preamble': '',
+    # Latex figure (float) alignment
+    # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (
+        root_doc,
+        "google-cloud-contentwarehouse.tex",
+        u"google-cloud-contentwarehouse Documentation",
+        author,
+        "manual",
+    )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (
+        root_doc,
+        "google-cloud-contentwarehouse",
+        u"Google Cloud Contentwarehouse Documentation",
+        [author],
+        1,
+    )
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    (
+        root_doc,
+        "google-cloud-contentwarehouse",
+        u"google-cloud-contentwarehouse Documentation",
+        author,
+        "google-cloud-contentwarehouse",
+        "GAPIC library for Google Cloud Contentwarehouse API",
+        "APIs",
+    )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    "python": ("http://python.readthedocs.org/en/latest/", None),
+    "gax": ("https://gax-python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
+    "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None),
+    "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
+    "grpc": ("https://grpc.io/grpc/python/", None),
+    "requests": ("http://requests.kennethreitz.org/en/stable/", None),
+    "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_link_service.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_link_service.rst
new file mode 100644
index 000000000000..e002d2ae556e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_link_service.rst
@@ -0,0 +1,10 @@
+DocumentLinkService
+-------------------------------------
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.document_link_service
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.document_link_service.pagers
+    :members:
+    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_schema_service.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_schema_service.rst
new file mode 100644
index 000000000000..ca3c26b276db
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_schema_service.rst
@@ -0,0 +1,10 @@
+DocumentSchemaService
+---------------------------------------
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.document_schema_service
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.document_schema_service.pagers
+    :members:
+    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_service.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_service.rst
new file mode 100644
index 000000000000..9164afdf4aa4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/document_service.rst
@@ -0,0 +1,10 @@
+DocumentService
+---------------------------------
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.document_service
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.document_service.pagers
+    :members:
+    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/pipeline_service.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/pipeline_service.rst
new file mode 100644
index 000000000000..6c62f7efa279
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/pipeline_service.rst
@@ -0,0 +1,6 @@
+PipelineService
+---------------------------------
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.pipeline_service
+    :members:
+    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/rule_set_service.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/rule_set_service.rst
new file mode 100644
index 000000000000..86c15c91f25a
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/rule_set_service.rst
@@ -0,0 +1,10 @@
+RuleSetService
+--------------------------------
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.rule_set_service
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.rule_set_service.pagers
+    :members:
+    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/services_.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/services_.rst
new file mode 100644
index 000000000000..3c2654988c59
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/services_.rst
@@ -0,0 +1,11 @@
+Services for Google Cloud Contentwarehouse v1 API
+=================================================
+.. toctree::
+    :maxdepth: 2
+
+    document_link_service
+    document_schema_service
+    document_service
+    pipeline_service
+    rule_set_service
+    synonym_set_service
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/synonym_set_service.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/synonym_set_service.rst
new file mode 100644
index 000000000000..c37fda82aeb6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/synonym_set_service.rst
@@ -0,0 +1,10 @@
+SynonymSetService
+-----------------------------------
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.synonym_set_service
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.contentwarehouse_v1.services.synonym_set_service.pagers
+    :members:
+    :inherited-members:
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/types_.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/types_.rst
new file mode 100644
index 000000000000..0eb0d3ef4f0c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/contentwarehouse_v1/types_.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Contentwarehouse v1 API
+==============================================
+
+.. automodule:: google.cloud.contentwarehouse_v1.types
+    :members:
+    :show-inheritance:
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/index.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/index.rst
new file mode 100644
index 000000000000..cdb9459acaf1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/docs/index.rst
@@ -0,0 +1,7 @@
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    contentwarehouse_v1/services_
+    contentwarehouse_v1/types_
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/__init__.py
new file mode 100644
index 000000000000..190f1941c82e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/__init__.py
@@ -0,0 +1,271 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.cloud.contentwarehouse import gapic_version as package_version
+
+__version__ = package_version.__version__
+
+
+from google.cloud.contentwarehouse_v1.services.document_link_service.client import DocumentLinkServiceClient
+from google.cloud.contentwarehouse_v1.services.document_link_service.async_client import DocumentLinkServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.document_schema_service.client import DocumentSchemaServiceClient
+from google.cloud.contentwarehouse_v1.services.document_schema_service.async_client import DocumentSchemaServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.document_service.client import DocumentServiceClient
+from google.cloud.contentwarehouse_v1.services.document_service.async_client import DocumentServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.pipeline_service.client import PipelineServiceClient
+from google.cloud.contentwarehouse_v1.services.pipeline_service.async_client import PipelineServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.rule_set_service.client import RuleSetServiceClient
+from google.cloud.contentwarehouse_v1.services.rule_set_service.async_client import RuleSetServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.synonym_set_service.client import SynonymSetServiceClient
+from google.cloud.contentwarehouse_v1.services.synonym_set_service.async_client import SynonymSetServiceAsyncClient
+
+from google.cloud.contentwarehouse_v1.types.async_document_service_request import CreateDocumentMetadata
+from google.cloud.contentwarehouse_v1.types.async_document_service_request import UpdateDocumentMetadata
+from google.cloud.contentwarehouse_v1.types.common import MergeFieldsOptions
+from google.cloud.contentwarehouse_v1.types.common import RequestMetadata
+from google.cloud.contentwarehouse_v1.types.common import ResponseMetadata
+from google.cloud.contentwarehouse_v1.types.common import UpdateOptions
+from google.cloud.contentwarehouse_v1.types.common import UserInfo
+from google.cloud.contentwarehouse_v1.types.common import AccessControlMode
+from google.cloud.contentwarehouse_v1.types.common import DatabaseType
+from google.cloud.contentwarehouse_v1.types.common import DocumentCreatorDefaultRole
+from google.cloud.contentwarehouse_v1.types.common import UpdateType
+from google.cloud.contentwarehouse_v1.types.document import DateTimeArray
+from google.cloud.contentwarehouse_v1.types.document import Document
+from google.cloud.contentwarehouse_v1.types.document import DocumentReference
+from google.cloud.contentwarehouse_v1.types.document import EnumArray
+from google.cloud.contentwarehouse_v1.types.document import EnumValue
+from google.cloud.contentwarehouse_v1.types.document import FloatArray
+from google.cloud.contentwarehouse_v1.types.document import IntegerArray
+from google.cloud.contentwarehouse_v1.types.document import MapProperty
+from google.cloud.contentwarehouse_v1.types.document import Property
+from google.cloud.contentwarehouse_v1.types.document import PropertyArray
+from google.cloud.contentwarehouse_v1.types.document import TextArray
+from google.cloud.contentwarehouse_v1.types.document import TimestampArray
+from google.cloud.contentwarehouse_v1.types.document import TimestampValue
+from google.cloud.contentwarehouse_v1.types.document import Value
+from google.cloud.contentwarehouse_v1.types.document import ContentCategory
+from google.cloud.contentwarehouse_v1.types.document import RawDocumentFileType
+from google.cloud.contentwarehouse_v1.types.document_link_service import CreateDocumentLinkRequest
+from google.cloud.contentwarehouse_v1.types.document_link_service import DeleteDocumentLinkRequest
+from google.cloud.contentwarehouse_v1.types.document_link_service import DocumentLink
+from google.cloud.contentwarehouse_v1.types.document_link_service import ListLinkedSourcesRequest
+from google.cloud.contentwarehouse_v1.types.document_link_service import ListLinkedSourcesResponse
+from google.cloud.contentwarehouse_v1.types.document_link_service import ListLinkedTargetsRequest
+from google.cloud.contentwarehouse_v1.types.document_link_service import ListLinkedTargetsResponse
+from google.cloud.contentwarehouse_v1.types.document_schema import DateTimeTypeOptions
+from google.cloud.contentwarehouse_v1.types.document_schema import DocumentSchema
+from google.cloud.contentwarehouse_v1.types.document_schema import EnumTypeOptions
+from google.cloud.contentwarehouse_v1.types.document_schema import FloatTypeOptions
+from google.cloud.contentwarehouse_v1.types.document_schema import IntegerTypeOptions
+from google.cloud.contentwarehouse_v1.types.document_schema import MapTypeOptions
+from google.cloud.contentwarehouse_v1.types.document_schema import PropertyDefinition
+from google.cloud.contentwarehouse_v1.types.document_schema import PropertyTypeOptions
+from google.cloud.contentwarehouse_v1.types.document_schema import TextTypeOptions
+from google.cloud.contentwarehouse_v1.types.document_schema import TimestampTypeOptions
+from google.cloud.contentwarehouse_v1.types.document_schema_service import CreateDocumentSchemaRequest
+from google.cloud.contentwarehouse_v1.types.document_schema_service import DeleteDocumentSchemaRequest
+from google.cloud.contentwarehouse_v1.types.document_schema_service import GetDocumentSchemaRequest
+from google.cloud.contentwarehouse_v1.types.document_schema_service import ListDocumentSchemasRequest
+from google.cloud.contentwarehouse_v1.types.document_schema_service import ListDocumentSchemasResponse
+from google.cloud.contentwarehouse_v1.types.document_schema_service import UpdateDocumentSchemaRequest
+from google.cloud.contentwarehouse_v1.types.document_service import CreateDocumentResponse
+from google.cloud.contentwarehouse_v1.types.document_service import FetchAclResponse
+from google.cloud.contentwarehouse_v1.types.document_service import QAResult
+from google.cloud.contentwarehouse_v1.types.document_service import SearchDocumentsResponse
+from google.cloud.contentwarehouse_v1.types.document_service import SetAclResponse
+from google.cloud.contentwarehouse_v1.types.document_service import UpdateDocumentResponse
+from google.cloud.contentwarehouse_v1.types.document_service_request import CloudAIDocumentOption
+from google.cloud.contentwarehouse_v1.types.document_service_request import CreateDocumentRequest
+from google.cloud.contentwarehouse_v1.types.document_service_request import DeleteDocumentRequest
+from google.cloud.contentwarehouse_v1.types.document_service_request import FetchAclRequest
+from google.cloud.contentwarehouse_v1.types.document_service_request import GetDocumentRequest
+from google.cloud.contentwarehouse_v1.types.document_service_request import LockDocumentRequest
+from google.cloud.contentwarehouse_v1.types.document_service_request import SearchDocumentsRequest
+from google.cloud.contentwarehouse_v1.types.document_service_request import SetAclRequest
+from google.cloud.contentwarehouse_v1.types.document_service_request import UpdateDocumentRequest
+from google.cloud.contentwarehouse_v1.types.filters import CustomWeightsMetadata
+from google.cloud.contentwarehouse_v1.types.filters import DocumentQuery
+from google.cloud.contentwarehouse_v1.types.filters import FileTypeFilter
+from google.cloud.contentwarehouse_v1.types.filters import PropertyFilter
+from google.cloud.contentwarehouse_v1.types.filters import TimeFilter
+from google.cloud.contentwarehouse_v1.types.filters import WeightedSchemaProperty
+from google.cloud.contentwarehouse_v1.types.histogram import HistogramQuery
+from google.cloud.contentwarehouse_v1.types.histogram import HistogramQueryPropertyNameFilter
+from google.cloud.contentwarehouse_v1.types.histogram import HistogramQueryResult
+from google.cloud.contentwarehouse_v1.types.pipeline_service import RunPipelineRequest
+from google.cloud.contentwarehouse_v1.types.pipelines import ExportToCdwPipeline
+from google.cloud.contentwarehouse_v1.types.pipelines import GcsIngestPipeline
+from google.cloud.contentwarehouse_v1.types.pipelines import GcsIngestWithDocAiProcessorsPipeline
+from google.cloud.contentwarehouse_v1.types.pipelines import IngestPipelineConfig
+from google.cloud.contentwarehouse_v1.types.pipelines import ProcessorInfo
+from google.cloud.contentwarehouse_v1.types.pipelines import ProcessWithDocAiPipeline
+from google.cloud.contentwarehouse_v1.types.pipelines import RunPipelineMetadata
+from google.cloud.contentwarehouse_v1.types.pipelines import RunPipelineResponse
+from google.cloud.contentwarehouse_v1.types.rule_engine import AccessControlAction
+from google.cloud.contentwarehouse_v1.types.rule_engine import Action
+from google.cloud.contentwarehouse_v1.types.rule_engine import ActionExecutorOutput
+from google.cloud.contentwarehouse_v1.types.rule_engine import ActionOutput
+from google.cloud.contentwarehouse_v1.types.rule_engine import AddToFolderAction
+from google.cloud.contentwarehouse_v1.types.rule_engine import DataUpdateAction
+from google.cloud.contentwarehouse_v1.types.rule_engine import DataValidationAction
+from google.cloud.contentwarehouse_v1.types.rule_engine import DeleteDocumentAction
+from google.cloud.contentwarehouse_v1.types.rule_engine import InvalidRule
+from google.cloud.contentwarehouse_v1.types.rule_engine import PublishAction
+from google.cloud.contentwarehouse_v1.types.rule_engine import RemoveFromFolderAction
+from google.cloud.contentwarehouse_v1.types.rule_engine import Rule
+from google.cloud.contentwarehouse_v1.types.rule_engine import RuleActionsPair
+from google.cloud.contentwarehouse_v1.types.rule_engine import RuleEngineOutput
+from google.cloud.contentwarehouse_v1.types.rule_engine import RuleEvaluatorOutput
+from google.cloud.contentwarehouse_v1.types.rule_engine import RuleSet
+from google.cloud.contentwarehouse_v1.types.ruleset_service_request import CreateRuleSetRequest
+from google.cloud.contentwarehouse_v1.types.ruleset_service_request import DeleteRuleSetRequest
+from google.cloud.contentwarehouse_v1.types.ruleset_service_request import GetRuleSetRequest
+from google.cloud.contentwarehouse_v1.types.ruleset_service_request import ListRuleSetsRequest
+from google.cloud.contentwarehouse_v1.types.ruleset_service_request import ListRuleSetsResponse
+from google.cloud.contentwarehouse_v1.types.ruleset_service_request import UpdateRuleSetRequest
+from google.cloud.contentwarehouse_v1.types.synonymset import SynonymSet
+from google.cloud.contentwarehouse_v1.types.synonymset_service_request import CreateSynonymSetRequest
+from google.cloud.contentwarehouse_v1.types.synonymset_service_request import DeleteSynonymSetRequest
+from google.cloud.contentwarehouse_v1.types.synonymset_service_request import GetSynonymSetRequest
+from google.cloud.contentwarehouse_v1.types.synonymset_service_request import ListSynonymSetsRequest
+from google.cloud.contentwarehouse_v1.types.synonymset_service_request import ListSynonymSetsResponse
+from google.cloud.contentwarehouse_v1.types.synonymset_service_request import UpdateSynonymSetRequest
+
+__all__ = ('DocumentLinkServiceClient',
+    'DocumentLinkServiceAsyncClient',
+    'DocumentSchemaServiceClient',
+    'DocumentSchemaServiceAsyncClient',
+    'DocumentServiceClient',
+    'DocumentServiceAsyncClient',
+    'PipelineServiceClient',
+    'PipelineServiceAsyncClient',
+    'RuleSetServiceClient',
+    'RuleSetServiceAsyncClient',
+    'SynonymSetServiceClient',
+    'SynonymSetServiceAsyncClient',
+    'CreateDocumentMetadata',
+    'UpdateDocumentMetadata',
+    'MergeFieldsOptions',
+    'RequestMetadata',
+    'ResponseMetadata',
+    'UpdateOptions',
+    'UserInfo',
+    'AccessControlMode',
+    'DatabaseType',
+    'DocumentCreatorDefaultRole',
+    'UpdateType',
+    'DateTimeArray',
+    'Document',
+    'DocumentReference',
+    'EnumArray',
+    'EnumValue',
+    'FloatArray',
+    'IntegerArray',
+    'MapProperty',
+    'Property',
+    'PropertyArray',
+    'TextArray',
+    'TimestampArray',
+    'TimestampValue',
+    'Value',
+    'ContentCategory',
+    'RawDocumentFileType',
+    'CreateDocumentLinkRequest',
+    'DeleteDocumentLinkRequest',
+    'DocumentLink',
+    'ListLinkedSourcesRequest',
+    'ListLinkedSourcesResponse',
+    'ListLinkedTargetsRequest',
+    'ListLinkedTargetsResponse',
+    'DateTimeTypeOptions',
+    'DocumentSchema',
+    'EnumTypeOptions',
+    'FloatTypeOptions',
+    'IntegerTypeOptions',
+    'MapTypeOptions',
+    'PropertyDefinition',
+    'PropertyTypeOptions',
+    'TextTypeOptions',
+    'TimestampTypeOptions',
+    'CreateDocumentSchemaRequest',
+    'DeleteDocumentSchemaRequest',
+    'GetDocumentSchemaRequest',
+    'ListDocumentSchemasRequest',
+    'ListDocumentSchemasResponse',
+    'UpdateDocumentSchemaRequest',
+    'CreateDocumentResponse',
+    'FetchAclResponse',
+    'QAResult',
+    'SearchDocumentsResponse',
+    'SetAclResponse',
+    'UpdateDocumentResponse',
+    'CloudAIDocumentOption',
+    'CreateDocumentRequest',
+    'DeleteDocumentRequest',
+    'FetchAclRequest',
+    'GetDocumentRequest',
+    'LockDocumentRequest',
+    'SearchDocumentsRequest',
+    'SetAclRequest',
+    'UpdateDocumentRequest',
+    'CustomWeightsMetadata',
+    'DocumentQuery',
+    'FileTypeFilter',
+    'PropertyFilter',
+    'TimeFilter',
+    'WeightedSchemaProperty',
+    'HistogramQuery',
+    'HistogramQueryPropertyNameFilter',
+    'HistogramQueryResult',
+    'RunPipelineRequest',
+    'ExportToCdwPipeline',
+    'GcsIngestPipeline',
+    'GcsIngestWithDocAiProcessorsPipeline',
+    'IngestPipelineConfig',
+    'ProcessorInfo',
+    'ProcessWithDocAiPipeline',
+    'RunPipelineMetadata',
+    'RunPipelineResponse',
+    'AccessControlAction',
+    'Action',
+    'ActionExecutorOutput',
+    'ActionOutput',
+    'AddToFolderAction',
+    'DataUpdateAction',
+    'DataValidationAction',
+    'DeleteDocumentAction',
+    'InvalidRule',
+    'PublishAction',
+    'RemoveFromFolderAction',
+    'Rule',
+    'RuleActionsPair',
+    'RuleEngineOutput',
+    'RuleEvaluatorOutput',
+    'RuleSet',
+    'CreateRuleSetRequest',
+    'DeleteRuleSetRequest',
+    'GetRuleSetRequest',
+    'ListRuleSetsRequest',
+    'ListRuleSetsResponse',
+    'UpdateRuleSetRequest',
+    'SynonymSet',
+    'CreateSynonymSetRequest',
+    'DeleteSynonymSetRequest',
+    'GetSynonymSetRequest',
+    'ListSynonymSetsRequest',
+    'ListSynonymSetsResponse',
+    'UpdateSynonymSetRequest',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/gapic_version.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/gapic_version.py
new file mode 100644
index 000000000000..558c8aab67c5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/gapic_version.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+__version__ = "0.0.0"  # {x-release-please-version}
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/py.typed b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/py.typed
new file mode 100644
index 000000000000..c1738e42e019
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-contentwarehouse package uses inline types.
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/__init__.py
new file mode 100644
index 000000000000..8e2ba2bb2734
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/__init__.py
@@ -0,0 +1,272 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+__version__ = package_version.__version__
+
+
+from .services.document_link_service import DocumentLinkServiceClient
+from .services.document_link_service import DocumentLinkServiceAsyncClient
+from .services.document_schema_service import DocumentSchemaServiceClient
+from .services.document_schema_service import DocumentSchemaServiceAsyncClient
+from .services.document_service import DocumentServiceClient
+from .services.document_service import DocumentServiceAsyncClient
+from .services.pipeline_service import PipelineServiceClient
+from .services.pipeline_service import PipelineServiceAsyncClient
+from .services.rule_set_service import RuleSetServiceClient
+from .services.rule_set_service import RuleSetServiceAsyncClient
+from .services.synonym_set_service import SynonymSetServiceClient
+from .services.synonym_set_service import SynonymSetServiceAsyncClient
+
+from .types.async_document_service_request import CreateDocumentMetadata
+from .types.async_document_service_request import UpdateDocumentMetadata
+from .types.common import MergeFieldsOptions
+from .types.common import RequestMetadata
+from .types.common import ResponseMetadata
+from .types.common import UpdateOptions
+from .types.common import UserInfo
+from .types.common import AccessControlMode
+from .types.common import DatabaseType
+from .types.common import DocumentCreatorDefaultRole
+from .types.common import UpdateType
+from .types.document import DateTimeArray
+from .types.document import Document
+from .types.document import DocumentReference
+from .types.document import EnumArray
+from .types.document import EnumValue
+from .types.document import FloatArray
+from .types.document import IntegerArray
+from .types.document import MapProperty
+from .types.document import Property
+from .types.document import PropertyArray
+from .types.document import TextArray
+from .types.document import TimestampArray
+from .types.document import TimestampValue
+from .types.document import Value
+from .types.document import ContentCategory
+from .types.document import RawDocumentFileType
+from .types.document_link_service import CreateDocumentLinkRequest
+from .types.document_link_service import DeleteDocumentLinkRequest
+from .types.document_link_service import DocumentLink
+from .types.document_link_service import ListLinkedSourcesRequest
+from .types.document_link_service import ListLinkedSourcesResponse
+from .types.document_link_service import ListLinkedTargetsRequest
+from .types.document_link_service import ListLinkedTargetsResponse
+from .types.document_schema import DateTimeTypeOptions
+from .types.document_schema import DocumentSchema
+from .types.document_schema import EnumTypeOptions
+from .types.document_schema import FloatTypeOptions
+from .types.document_schema import IntegerTypeOptions
+from .types.document_schema import MapTypeOptions
+from .types.document_schema import PropertyDefinition
+from .types.document_schema import PropertyTypeOptions
+from .types.document_schema import TextTypeOptions
+from .types.document_schema import TimestampTypeOptions
+from .types.document_schema_service import CreateDocumentSchemaRequest
+from .types.document_schema_service import DeleteDocumentSchemaRequest
+from .types.document_schema_service import GetDocumentSchemaRequest
+from .types.document_schema_service import ListDocumentSchemasRequest
+from .types.document_schema_service import ListDocumentSchemasResponse
+from .types.document_schema_service import UpdateDocumentSchemaRequest
+from .types.document_service import CreateDocumentResponse
+from .types.document_service import FetchAclResponse
+from .types.document_service import QAResult
+from .types.document_service import SearchDocumentsResponse
+from .types.document_service import SetAclResponse
+from .types.document_service import UpdateDocumentResponse
+from .types.document_service_request import CloudAIDocumentOption
+from .types.document_service_request import CreateDocumentRequest
+from .types.document_service_request import DeleteDocumentRequest
+from .types.document_service_request import FetchAclRequest
+from .types.document_service_request import GetDocumentRequest
+from .types.document_service_request import LockDocumentRequest
+from .types.document_service_request import SearchDocumentsRequest
+from .types.document_service_request import SetAclRequest
+from .types.document_service_request import UpdateDocumentRequest
+from .types.filters import CustomWeightsMetadata
+from .types.filters import DocumentQuery
+from .types.filters import FileTypeFilter
+from .types.filters import PropertyFilter
+from .types.filters import TimeFilter
+from .types.filters import WeightedSchemaProperty
+from .types.histogram import HistogramQuery
+from .types.histogram import HistogramQueryPropertyNameFilter
+from .types.histogram import HistogramQueryResult
+from .types.pipeline_service import RunPipelineRequest
+from .types.pipelines import ExportToCdwPipeline
+from .types.pipelines import GcsIngestPipeline
+from .types.pipelines import GcsIngestWithDocAiProcessorsPipeline
+from .types.pipelines import IngestPipelineConfig
+from .types.pipelines import ProcessorInfo
+from .types.pipelines import ProcessWithDocAiPipeline
+from .types.pipelines import RunPipelineMetadata
+from .types.pipelines import RunPipelineResponse
+from .types.rule_engine import AccessControlAction
+from .types.rule_engine import Action
+from .types.rule_engine import ActionExecutorOutput
+from .types.rule_engine import ActionOutput
+from .types.rule_engine import AddToFolderAction
+from .types.rule_engine import DataUpdateAction
+from .types.rule_engine import DataValidationAction
+from .types.rule_engine import DeleteDocumentAction
+from .types.rule_engine import InvalidRule
+from .types.rule_engine import PublishAction
+from .types.rule_engine import RemoveFromFolderAction
+from .types.rule_engine import Rule
+from .types.rule_engine import RuleActionsPair
+from .types.rule_engine import RuleEngineOutput
+from .types.rule_engine import RuleEvaluatorOutput
+from .types.rule_engine import RuleSet
+from .types.ruleset_service_request import CreateRuleSetRequest
+from .types.ruleset_service_request import DeleteRuleSetRequest
+from .types.ruleset_service_request import GetRuleSetRequest
+from .types.ruleset_service_request import ListRuleSetsRequest
+from .types.ruleset_service_request import ListRuleSetsResponse
+from .types.ruleset_service_request import UpdateRuleSetRequest
+from .types.synonymset import SynonymSet
+from .types.synonymset_service_request import CreateSynonymSetRequest
+from .types.synonymset_service_request import DeleteSynonymSetRequest
+from .types.synonymset_service_request import GetSynonymSetRequest
+from .types.synonymset_service_request import ListSynonymSetsRequest
+from .types.synonymset_service_request import ListSynonymSetsResponse
+from .types.synonymset_service_request import UpdateSynonymSetRequest
+
+__all__ = (
+    'DocumentLinkServiceAsyncClient',
+    'DocumentSchemaServiceAsyncClient',
+    'DocumentServiceAsyncClient',
+    'PipelineServiceAsyncClient',
+    'RuleSetServiceAsyncClient',
+    'SynonymSetServiceAsyncClient',
+'AccessControlAction',
+'AccessControlMode',
+'Action',
+'ActionExecutorOutput',
+'ActionOutput',
+'AddToFolderAction',
+'CloudAIDocumentOption',
+'ContentCategory',
+'CreateDocumentLinkRequest',
+'CreateDocumentMetadata',
+'CreateDocumentRequest',
+'CreateDocumentResponse',
+'CreateDocumentSchemaRequest',
+'CreateRuleSetRequest',
+'CreateSynonymSetRequest',
+'CustomWeightsMetadata',
+'DataUpdateAction',
+'DataValidationAction',
+'DatabaseType',
+'DateTimeArray',
+'DateTimeTypeOptions',
+'DeleteDocumentAction',
+'DeleteDocumentLinkRequest',
+'DeleteDocumentRequest',
+'DeleteDocumentSchemaRequest',
+'DeleteRuleSetRequest',
+'DeleteSynonymSetRequest',
+'Document',
+'DocumentCreatorDefaultRole',
+'DocumentLink',
+'DocumentLinkServiceClient',
+'DocumentQuery',
+'DocumentReference',
+'DocumentSchema',
+'DocumentSchemaServiceClient',
+'DocumentServiceClient',
+'EnumArray',
+'EnumTypeOptions',
+'EnumValue',
+'ExportToCdwPipeline',
+'FetchAclRequest',
+'FetchAclResponse',
+'FileTypeFilter',
+'FloatArray',
+'FloatTypeOptions',
+'GcsIngestPipeline',
+'GcsIngestWithDocAiProcessorsPipeline',
+'GetDocumentRequest',
+'GetDocumentSchemaRequest',
+'GetRuleSetRequest',
+'GetSynonymSetRequest',
+'HistogramQuery',
+'HistogramQueryPropertyNameFilter',
+'HistogramQueryResult',
+'IngestPipelineConfig',
+'IntegerArray',
+'IntegerTypeOptions',
+'InvalidRule',
+'ListDocumentSchemasRequest',
+'ListDocumentSchemasResponse',
+'ListLinkedSourcesRequest',
+'ListLinkedSourcesResponse',
+'ListLinkedTargetsRequest',
+'ListLinkedTargetsResponse',
+'ListRuleSetsRequest',
+'ListRuleSetsResponse',
+'ListSynonymSetsRequest',
+'ListSynonymSetsResponse',
+'LockDocumentRequest',
+'MapProperty',
+'MapTypeOptions',
+'MergeFieldsOptions',
+'PipelineServiceClient',
+'ProcessWithDocAiPipeline',
+'ProcessorInfo',
+'Property',
+'PropertyArray',
+'PropertyDefinition',
+'PropertyFilter',
+'PropertyTypeOptions',
+'PublishAction',
+'QAResult',
+'RawDocumentFileType',
+'RemoveFromFolderAction',
+'RequestMetadata',
+'ResponseMetadata',
+'Rule',
+'RuleActionsPair',
+'RuleEngineOutput',
+'RuleEvaluatorOutput',
+'RuleSet',
+'RuleSetServiceClient',
+'RunPipelineMetadata',
+'RunPipelineRequest',
+'RunPipelineResponse',
+'SearchDocumentsRequest',
+'SearchDocumentsResponse',
+'SetAclRequest',
+'SetAclResponse',
+'SynonymSet',
+'SynonymSetServiceClient',
+'TextArray',
+'TextTypeOptions',
+'TimeFilter',
+'TimestampArray',
+'TimestampTypeOptions',
+'TimestampValue',
+'UpdateDocumentMetadata',
+'UpdateDocumentRequest',
+'UpdateDocumentResponse',
+'UpdateDocumentSchemaRequest',
+'UpdateOptions',
+'UpdateRuleSetRequest',
+'UpdateSynonymSetRequest',
+'UpdateType',
+'UserInfo',
+'Value',
+'WeightedSchemaProperty',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/gapic_metadata.json
new file mode 100644
index 000000000000..755e453cc255
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/gapic_metadata.json
@@ -0,0 +1,543 @@
+ {
+  "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
+  "language": "python",
+  "libraryPackage": "google.cloud.contentwarehouse_v1",
+  "protoPackage": "google.cloud.contentwarehouse.v1",
+  "schema": "1.0",
+  "services": {
+    "DocumentLinkService": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "DocumentLinkServiceClient",
+          "rpcs": {
+            "CreateDocumentLink": {
+              "methods": [
+                "create_document_link"
+              ]
+            },
+            "DeleteDocumentLink": {
+              "methods": [
+                "delete_document_link"
+              ]
+            },
+            "ListLinkedSources": {
+              "methods": [
+                "list_linked_sources"
+              ]
+            },
+            "ListLinkedTargets": {
+              "methods": [
+                "list_linked_targets"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "DocumentLinkServiceAsyncClient",
+          "rpcs": {
+            "CreateDocumentLink": {
+              "methods": [
+                "create_document_link"
+              ]
+            },
+            "DeleteDocumentLink": {
+              "methods": [
+                "delete_document_link"
+              ]
+            },
+            "ListLinkedSources": {
+              "methods": [
+                "list_linked_sources"
+              ]
+            },
+            "ListLinkedTargets": {
+              "methods": [
+                "list_linked_targets"
+              ]
+            }
+          }
+        },
+        "rest": {
+          "libraryClient": "DocumentLinkServiceClient",
+          "rpcs": {
+            "CreateDocumentLink": {
+              "methods": [
+                "create_document_link"
+              ]
+            },
+            "DeleteDocumentLink": {
+              "methods": [
+                "delete_document_link"
+              ]
+            },
+            "ListLinkedSources": {
+              "methods": [
+                "list_linked_sources"
+              ]
+            },
+            "ListLinkedTargets": {
+              "methods": [
+                "list_linked_targets"
+              ]
+            }
+          }
+        }
+      }
+    },
+    "DocumentSchemaService": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "DocumentSchemaServiceClient",
+          "rpcs": {
+            "CreateDocumentSchema": {
+              "methods": [
+                "create_document_schema"
+              ]
+            },
+            "DeleteDocumentSchema": {
+              "methods": [
+                "delete_document_schema"
+              ]
+            },
+            "GetDocumentSchema": {
+              "methods": [
+                "get_document_schema"
+              ]
+            },
+            "ListDocumentSchemas": {
+              "methods": [
+                "list_document_schemas"
+              ]
+            },
+            "UpdateDocumentSchema": {
+              "methods": [
+                "update_document_schema"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "DocumentSchemaServiceAsyncClient",
+          "rpcs": {
+            "CreateDocumentSchema": {
+              "methods": [
+                "create_document_schema"
+              ]
+            },
+            "DeleteDocumentSchema": {
+              "methods": [
+                "delete_document_schema"
+              ]
+            },
+            "GetDocumentSchema": {
+              "methods": [
+                "get_document_schema"
+              ]
+            },
+            "ListDocumentSchemas": {
+              "methods": [
+                "list_document_schemas"
+              ]
+            },
+            "UpdateDocumentSchema": {
+              "methods": [
+                "update_document_schema"
+              ]
+            }
+          }
+        },
+        "rest": {
+          "libraryClient": "DocumentSchemaServiceClient",
+          "rpcs": {
+            "CreateDocumentSchema": {
+              "methods": [
+                "create_document_schema"
+              ]
+            },
+            "DeleteDocumentSchema": {
+              "methods": [
+                "delete_document_schema"
+              ]
+            },
+            "GetDocumentSchema": {
+              "methods": [
+                "get_document_schema"
+              ]
+            },
+            "ListDocumentSchemas": {
+              "methods": [
+                "list_document_schemas"
+              ]
+            },
+            "UpdateDocumentSchema": {
+              "methods": [
+                "update_document_schema"
+              ]
+            }
+          }
+        }
+      }
+    },
+    "DocumentService": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "DocumentServiceClient",
+          "rpcs": {
+            "CreateDocument": {
+              "methods": [
+                "create_document"
+              ]
+            },
+            "DeleteDocument": {
+              "methods": [
+                "delete_document"
+              ]
+            },
+            "FetchAcl": {
+              "methods": [
+                "fetch_acl"
+              ]
+            },
+            "GetDocument": {
+              "methods": [
+                "get_document"
+              ]
+            },
+            "LockDocument": {
+              "methods": [
+                "lock_document"
+              ]
+            },
+            "SearchDocuments": {
+              "methods": [
+                "search_documents"
+              ]
+            },
+            "SetAcl": {
+              "methods": [
+                "set_acl"
+              ]
+            },
+            "UpdateDocument": {
+              "methods": [
+                "update_document"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "DocumentServiceAsyncClient",
+          "rpcs": {
+            "CreateDocument": {
+              "methods": [
+                "create_document"
+              ]
+            },
+            "DeleteDocument": {
+              "methods": [
+                "delete_document"
+              ]
+            },
+            "FetchAcl": {
+              "methods": [
+                "fetch_acl"
+              ]
+            },
+            "GetDocument": {
+              "methods": [
+                "get_document"
+              ]
+            },
+            "LockDocument": {
+              "methods": [
+                "lock_document"
+              ]
+            },
+            "SearchDocuments": {
+              "methods": [
+                "search_documents"
+              ]
+            },
+            "SetAcl": {
+              "methods": [
+                "set_acl"
+              ]
+            },
+            "UpdateDocument": {
+              "methods": [
+                "update_document"
+              ]
+            }
+          }
+        },
+        "rest": {
+          "libraryClient": "DocumentServiceClient",
+          "rpcs": {
+            "CreateDocument": {
+              "methods": [
+                "create_document"
+              ]
+            },
+            "DeleteDocument": {
+              "methods": [
+                "delete_document"
+              ]
+            },
+            "FetchAcl": {
+              "methods": [
+                "fetch_acl"
+              ]
+            },
+            "GetDocument": {
+              "methods": [
+                "get_document"
+              ]
+            },
+            "LockDocument": {
+              "methods": [
+                "lock_document"
+              ]
+            },
+            "SearchDocuments": {
+              "methods": [
+                "search_documents"
+              ]
+            },
+            "SetAcl": {
+              "methods": [
+                "set_acl"
+              ]
+            },
+            "UpdateDocument": {
+              "methods": [
+                "update_document"
+              ]
+            }
+          }
+        }
+      }
+    },
+    "PipelineService": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "PipelineServiceClient",
+          "rpcs": {
+            "RunPipeline": {
+              "methods": [
+                "run_pipeline"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "PipelineServiceAsyncClient",
+          "rpcs": {
+            "RunPipeline": {
+              "methods": [
+                "run_pipeline"
+              ]
+            }
+          }
+        },
+        "rest": {
+          "libraryClient": "PipelineServiceClient",
+          "rpcs": {
+            "RunPipeline": {
+              "methods": [
+                "run_pipeline"
+              ]
+            }
+          }
+        }
+      }
+    },
+    "RuleSetService": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "RuleSetServiceClient",
+          "rpcs": {
+            "CreateRuleSet": {
+              "methods": [
+                "create_rule_set"
+              ]
+            },
+            "DeleteRuleSet": {
+              "methods": [
+                "delete_rule_set"
+              ]
+            },
+            "GetRuleSet": {
+              "methods": [
+                "get_rule_set"
+              ]
+            },
+            "ListRuleSets": {
+              "methods": [
+                "list_rule_sets"
+              ]
+            },
+            "UpdateRuleSet": {
+              "methods": [
+                "update_rule_set"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "RuleSetServiceAsyncClient",
+          "rpcs": {
+            "CreateRuleSet": {
+              "methods": [
+                "create_rule_set"
+              ]
+            },
+            "DeleteRuleSet": {
+              "methods": [
+                "delete_rule_set"
+              ]
+            },
+            "GetRuleSet": {
+              "methods": [
+                "get_rule_set"
+              ]
+            },
+            "ListRuleSets": {
+              "methods": [
+                "list_rule_sets"
+              ]
+            },
+            "UpdateRuleSet": {
+              "methods": [
+                "update_rule_set"
+              ]
+            }
+          }
+        },
+        "rest": {
+          "libraryClient": "RuleSetServiceClient",
+          "rpcs": {
+            "CreateRuleSet": {
+              "methods": [
+                "create_rule_set"
+              ]
+            },
+            "DeleteRuleSet": {
+              "methods": [
+                "delete_rule_set"
+              ]
+            },
+            "GetRuleSet": {
+              "methods": [
+                "get_rule_set"
+              ]
+            },
+            "ListRuleSets": {
+              "methods": [
+                "list_rule_sets"
+              ]
+            },
+            "UpdateRuleSet": {
+              "methods": [
+                "update_rule_set"
+              ]
+            }
+          }
+        }
+      }
+    },
+    "SynonymSetService": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "SynonymSetServiceClient",
+          "rpcs": {
+            "CreateSynonymSet": {
+              "methods": [
+                "create_synonym_set"
+              ]
+            },
+            "DeleteSynonymSet": {
+              "methods": [
+                "delete_synonym_set"
+              ]
+            },
+            "GetSynonymSet": {
+              "methods": [
+                "get_synonym_set"
+              ]
+            },
+            "ListSynonymSets": {
+              "methods": [
+                "list_synonym_sets"
+              ]
+            },
+            "UpdateSynonymSet": {
+              "methods": [
+                "update_synonym_set"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "SynonymSetServiceAsyncClient",
+          "rpcs": {
+            "CreateSynonymSet": {
+              "methods": [
+                "create_synonym_set"
+              ]
+            },
+            "DeleteSynonymSet": {
+              "methods": [
+                "delete_synonym_set"
+              ]
+            },
+            "GetSynonymSet": {
+              "methods": [
+                "get_synonym_set"
+              ]
+            },
+            "ListSynonymSets": {
+              "methods": [
+                "list_synonym_sets"
+              ]
+            },
+            "UpdateSynonymSet": {
+              "methods": [
+                "update_synonym_set"
+              ]
+            }
+          }
+        },
+        "rest": {
+          "libraryClient": "SynonymSetServiceClient",
+          "rpcs": {
+            "CreateSynonymSet": {
+              "methods": [
+                "create_synonym_set"
+              ]
+            },
+            "DeleteSynonymSet": {
+              "methods": [
+                "delete_synonym_set"
+              ]
+            },
+            "GetSynonymSet": {
+              "methods": [
+                "get_synonym_set"
+              ]
+            },
+            "ListSynonymSets": {
+              "methods": [
+                "list_synonym_sets"
+              ]
+            },
+            "UpdateSynonymSet": {
+              "methods": [
+                "update_synonym_set"
+              ]
+            }
+          }
+        }
+      }
+    }
+  }
+}
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/gapic_version.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/gapic_version.py
new file mode 100644
index 000000000000..558c8aab67c5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/gapic_version.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+__version__ = "0.0.0"  # {x-release-please-version}
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/py.typed b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/py.typed
new file mode 100644
index 000000000000..c1738e42e019
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-contentwarehouse package uses inline types.
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/__init__.py
new file mode 100644
index 000000000000..8f6cf068242c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/__init__.py
new file mode 100644
index 000000000000..24ca5d4fe548
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import DocumentLinkServiceClient
+from .async_client import DocumentLinkServiceAsyncClient
+
+__all__ = (
+    'DocumentLinkServiceClient',
+    'DocumentLinkServiceAsyncClient',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py
new file mode 100644
index 000000000000..a3c6b45ac038
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/async_client.py
@@ -0,0 +1,727 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.oauth2 import service_account              # type: ignore
+
+
+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.document_link_service import pagers
+from google.cloud.contentwarehouse_v1.types import document
+from google.cloud.contentwarehouse_v1.types import document_link_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DocumentLinkServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import DocumentLinkServiceGrpcAsyncIOTransport
+from .client import DocumentLinkServiceClient
+
+
+class DocumentLinkServiceAsyncClient:
+    """This service lets you manage document-links.
+    Document-Links are treated as sub-resources under source
+    documents.
+    """
+
+    _client: DocumentLinkServiceClient
+
+    # Copy defaults from the synchronous client for use here.
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = DocumentLinkServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = DocumentLinkServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = DocumentLinkServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = DocumentLinkServiceClient._DEFAULT_UNIVERSE
+
+    document_path = staticmethod(DocumentLinkServiceClient.document_path)
+    parse_document_path = staticmethod(DocumentLinkServiceClient.parse_document_path)
+    document_link_path = staticmethod(DocumentLinkServiceClient.document_link_path)
+    parse_document_link_path = staticmethod(DocumentLinkServiceClient.parse_document_link_path)
+    common_billing_account_path = staticmethod(DocumentLinkServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(DocumentLinkServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(DocumentLinkServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(DocumentLinkServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(DocumentLinkServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(DocumentLinkServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(DocumentLinkServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(DocumentLinkServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(DocumentLinkServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(DocumentLinkServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentLinkServiceAsyncClient: The constructed client.
+        """
+        return DocumentLinkServiceClient.from_service_account_info.__func__(DocumentLinkServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentLinkServiceAsyncClient: The constructed client.
+        """
+        return DocumentLinkServiceClient.from_service_account_file.__func__(DocumentLinkServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DocumentLinkServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DocumentLinkServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DocumentLinkServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = DocumentLinkServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DocumentLinkServiceTransport, Callable[..., DocumentLinkServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the document link service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DocumentLinkServiceTransport,Callable[..., DocumentLinkServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DocumentLinkServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = DocumentLinkServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+    async def list_linked_targets(self,
+            request: Optional[Union[document_link_service.ListLinkedTargetsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_link_service.ListLinkedTargetsResponse:
+        r"""Return all target document-links from the document.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_list_linked_targets():
+                # Create a client
+                client = contentwarehouse_v1.DocumentLinkServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListLinkedTargetsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = await client.list_linked_targets(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.ListLinkedTargetsRequest, dict]]):
+                The request object. Request message for
+                DocumentLinkService.ListLinkedTargets.
+            parent (:class:`str`):
+                Required. The name of the document, for which all target
+                links are returned. Format:
+                projects/{project_number}/locations/{location}/documents/{target_document_id}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.ListLinkedTargetsResponse:
+                Response message for
+                DocumentLinkService.ListLinkedTargets.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_link_service.ListLinkedTargetsRequest):
+            request = document_link_service.ListLinkedTargetsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_linked_targets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_linked_sources(self,
+            request: Optional[Union[document_link_service.ListLinkedSourcesRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListLinkedSourcesAsyncPager:
+        r"""Return all source document-links from the document.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_list_linked_sources():
+                # Create a client
+                client = contentwarehouse_v1.DocumentLinkServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListLinkedSourcesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_linked_sources(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.ListLinkedSourcesRequest, dict]]):
+                The request object. Request message for
+                DocumentLinkService.ListLinkedSources.
+            parent (:class:`str`):
+                Required. The name of the document, for which all source
+                links are returned. Format:
+                projects/{project_number}/locations/{location}/documents/{source_document_id}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.document_link_service.pagers.ListLinkedSourcesAsyncPager:
+                Response message for
+                DocumentLinkService.ListLinkedSources.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_link_service.ListLinkedSourcesRequest):
+            request = document_link_service.ListLinkedSourcesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_linked_sources]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListLinkedSourcesAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def create_document_link(self,
+            request: Optional[Union[document_link_service.CreateDocumentLinkRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            document_link: Optional[document_link_service.DocumentLink] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_link_service.DocumentLink:
+        r"""Create a link between a source document and a target
+        document.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_create_document_link():
+                # Create a client
+                client = contentwarehouse_v1.DocumentLinkServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.CreateDocumentLinkRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = await client.create_document_link(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.CreateDocumentLinkRequest, dict]]):
+                The request object. Request message for
+                DocumentLinkService.CreateDocumentLink.
+            parent (:class:`str`):
+                Required. Parent of the document-link to be created.
+                parent of document-link should be a document. Format:
+                projects/{project_number}/locations/{location}/documents/{source_document_id}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document_link (:class:`google.cloud.contentwarehouse_v1.types.DocumentLink`):
+                Required. Document links associated with the source
+                documents (source_document_id).
+
+                This corresponds to the ``document_link`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.DocumentLink:
+                A document-link between source and
+                target document.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, document_link])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_link_service.CreateDocumentLinkRequest):
+            request = document_link_service.CreateDocumentLinkRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if document_link is not None:
+            request.document_link = document_link
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_document_link]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_document_link(self,
+            request: Optional[Union[document_link_service.DeleteDocumentLinkRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Remove the link between the source and target
+        documents.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_delete_document_link():
+                # Create a client
+                client = contentwarehouse_v1.DocumentLinkServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteDocumentLinkRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.delete_document_link(request=request)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.DeleteDocumentLinkRequest, dict]]):
+                The request object. Request message for
+                DocumentLinkService.DeleteDocumentLink.
+            name (:class:`str`):
+                Required. The name of the document-link to be deleted.
+                Format:
+                projects/{project_number}/locations/{location}/documents/{source_document_id}/documentLinks/{document_link_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_link_service.DeleteDocumentLinkRequest):
+            request = document_link_service.DeleteDocumentLinkRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_document_link]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def __aenter__(self) -> "DocumentLinkServiceAsyncClient":
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        await self.transport.close()
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "DocumentLinkServiceAsyncClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/client.py
new file mode 100644
index 000000000000..0822792599bc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/client.py
@@ -0,0 +1,1091 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+import warnings
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials             # type: ignore
+from google.auth.transport import mtls                            # type: ignore
+from google.auth.transport.grpc import SslCredentials             # type: ignore
+from google.auth.exceptions import MutualTLSChannelError          # type: ignore
+from google.oauth2 import service_account                         # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.document_link_service import pagers
+from google.cloud.contentwarehouse_v1.types import document
+from google.cloud.contentwarehouse_v1.types import document_link_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DocumentLinkServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import DocumentLinkServiceGrpcTransport
+from .transports.grpc_asyncio import DocumentLinkServiceGrpcAsyncIOTransport
+from .transports.rest import DocumentLinkServiceRestTransport
+
+
+class DocumentLinkServiceClientMeta(type):
+    """Metaclass for the DocumentLinkService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DocumentLinkServiceTransport]]
+    _transport_registry["grpc"] = DocumentLinkServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = DocumentLinkServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = DocumentLinkServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[DocumentLinkServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DocumentLinkServiceClient(metaclass=DocumentLinkServiceClientMeta):
+    """This service lets you manage document-links.
+    Document-Links are treated as sub-resources under source
+    documents.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "contentwarehouse.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "contentwarehouse.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentLinkServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentLinkServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DocumentLinkServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DocumentLinkServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def document_path(project: str,location: str,document: str,) -> str:
+        """Returns a fully-qualified document string."""
+        return "projects/{project}/locations/{location}/documents/{document}".format(project=project, location=location, document=document, )
+
+    @staticmethod
+    def parse_document_path(path: str) -> Dict[str,str]:
+        """Parses a document path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/documents/(?P<document>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def document_link_path(project: str,location: str,document: str,document_link: str,) -> str:
+        """Returns a fully-qualified document_link string."""
+        return "projects/{project}/locations/{location}/documents/{document}/documentLinks/{document_link}".format(project=project, location=location, document=document, document_link=document_link, )
+
+    @staticmethod
+    def parse_document_link_path(path: str) -> Dict[str,str]:
+        """Parses a document_link path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/documents/(?P<document>.+?)/documentLinks/(?P<document_link>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        if use_cert_flag:
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+            universe_domain (str): The universe domain used by the client.
+            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+                Possible values are "always", "auto", or "never".
+
+        Returns:
+            str: The API endpoint to be used by the client.
+        """
+        if api_override is not None:
+            api_endpoint = api_override
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            _default_universe = DocumentLinkServiceClient._DEFAULT_UNIVERSE
+            if universe_domain != _default_universe:
+                raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
+            api_endpoint = DocumentLinkServiceClient.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = DocumentLinkServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
+        return api_endpoint
+
+    @staticmethod
+    def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
+        """Return the universe domain used by the client.
+
+        Args:
+            client_universe_domain (Optional[str]): The universe domain configured via the client options.
+            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+        Returns:
+            str: The universe domain to be used by the client.
+
+        Raises:
+            ValueError: If the universe domain is an empty string.
+        """
+        universe_domain = DocumentLinkServiceClient._DEFAULT_UNIVERSE
+        if client_universe_domain is not None:
+            universe_domain = client_universe_domain
+        elif universe_domain_env is not None:
+            universe_domain = universe_domain_env
+        if len(universe_domain.strip()) == 0:
+            raise ValueError("Universe Domain cannot be an empty string.")
+        return universe_domain
+
+    @staticmethod
+    def _compare_universes(client_universe: str,
+                           credentials: ga_credentials.Credentials) -> bool:
+        """Returns True iff the universe domains used by the client and credentials match.
+
+        Args:
+            client_universe (str): The universe domain configured via the client options.
+            credentials (ga_credentials.Credentials): The credentials being used in the client.
+
+        Returns:
+            bool: True iff client_universe matches the universe in credentials.
+
+        Raises:
+            ValueError: when client_universe does not match the universe in credentials.
+        """
+
+        default_universe = DocumentLinkServiceClient._DEFAULT_UNIVERSE
+        credentials_universe = getattr(credentials, "universe_domain", default_universe)
+
+        if client_universe != credentials_universe:
+            raise ValueError("The configured universe domain "
+                f"({client_universe}) does not match the universe domain "
+                f"found in the credentials ({credentials_universe}). "
+                "If you haven't configured the universe domain explicitly, "
+                f"`{default_universe}` is the default.")
+        return True
+
+    def _validate_universe_domain(self):
+        """Validates client's and credentials' universe domains are consistent.
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+        self._is_universe_domain_valid = (self._is_universe_domain_valid or
+            DocumentLinkServiceClient._compare_universes(self.universe_domain, self.transport._credentials))
+        return self._is_universe_domain_valid
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        return self._universe_domain
+
    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, DocumentLinkServiceTransport, Callable[..., DocumentLinkServiceTransport]]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the document link service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Optional[Union[str,DocumentLinkServiceTransport,Callable[..., DocumentLinkServiceTransport]]]):
                The transport to use, or a Callable that constructs and returns a new transport.
                If a Callable is given, it will be called with the same set of initialization
                arguments as used in the DocumentLinkServiceTransport constructor.
                If set to None, a transport is chosen automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that the ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a dict, None, or a ClientOptions
        # instance, and always end up with a ClientOptions object.
        self._client_options = client_options
        if isinstance(self._client_options, dict):
            self._client_options = client_options_lib.from_dict(self._client_options)
        if self._client_options is None:
            self._client_options = client_options_lib.ClientOptions()
        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)

        # getattr() because older ClientOptions versions may lack this field.
        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)

        # Resolve env-driven settings, then cert source and universe domain.
        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DocumentLinkServiceClient._read_environment_variables()
        self._client_cert_source = DocumentLinkServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
        self._universe_domain = DocumentLinkServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
        self._api_endpoint = None # updated below, depending on `transport`

        # Initialize the universe domain validation.
        self._is_universe_domain_valid = False

        # An API key and explicit credentials are two competing auth
        # mechanisms; refuse ambiguous configurations.
        api_key_value = getattr(self._client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        transport_provided = isinstance(transport, DocumentLinkServiceTransport)
        if transport_provided:
            # transport is a DocumentLinkServiceTransport instance.
            # A ready-made transport already carries credentials/scopes, so
            # passing them here as well would be contradictory.
            if credentials or self._client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if self._client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = cast(DocumentLinkServiceTransport, transport)
            self._api_endpoint = self._transport.host

        # If no transport supplied the endpoint, derive it from the options,
        # cert source, universe domain, and mTLS policy.
        self._api_endpoint = (self._api_endpoint or
            DocumentLinkServiceClient._get_api_endpoint(
                self._client_options.api_endpoint,
                self._client_cert_source,
                self._universe_domain,
                self._use_mtls_endpoint))

        if not transport_provided:
            import google.auth._default  # type: ignore

            # Exchange the API key for credentials when supported by google-auth.
            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            transport_init: Union[Type[DocumentLinkServiceTransport], Callable[..., DocumentLinkServiceTransport]] = (
                DocumentLinkServiceClient.get_transport_class(transport)
                if isinstance(transport, str) or transport is None
                else cast(Callable[..., DocumentLinkServiceTransport], transport)
            )
            # initialize with the provided callable or the passed in class
            self._transport = transport_init(
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )
+
+    def list_linked_targets(self,
+            request: Optional[Union[document_link_service.ListLinkedTargetsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_link_service.ListLinkedTargetsResponse:
+        r"""Return all target document-links from the document.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_list_linked_targets():
+                # Create a client
+                client = contentwarehouse_v1.DocumentLinkServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListLinkedTargetsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = client.list_linked_targets(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.ListLinkedTargetsRequest, dict]):
+                The request object. Request message for
+                DocumentLinkService.ListLinkedTargets.
+            parent (str):
+                Required. The name of the document, for which all target
+                links are returned. Format:
+                projects/{project_number}/locations/{location}/documents/{target_document_id}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.ListLinkedTargetsResponse:
+                Response message for
+                DocumentLinkService.ListLinkedTargets.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_link_service.ListLinkedTargetsRequest):
+            request = document_link_service.ListLinkedTargetsRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_linked_targets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_linked_sources(self,
+            request: Optional[Union[document_link_service.ListLinkedSourcesRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListLinkedSourcesPager:
+        r"""Return all source document-links from the document.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_list_linked_sources():
+                # Create a client
+                client = contentwarehouse_v1.DocumentLinkServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListLinkedSourcesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_linked_sources(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.ListLinkedSourcesRequest, dict]):
+                The request object. Response message for
+                DocumentLinkService.ListLinkedSources.
+            parent (str):
+                Required. The name of the document, for which all source
+                links are returned. Format:
+                projects/{project_number}/locations/{location}/documents/{source_document_id}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.document_link_service.pagers.ListLinkedSourcesPager:
+                Response message for
+                DocumentLinkService.ListLinkedSources.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_link_service.ListLinkedSourcesRequest):
+            request = document_link_service.ListLinkedSourcesRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_linked_sources]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListLinkedSourcesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_document_link(self,
+            request: Optional[Union[document_link_service.CreateDocumentLinkRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            document_link: Optional[document_link_service.DocumentLink] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_link_service.DocumentLink:
+        r"""Create a link between a source document and a target
+        document.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_create_document_link():
+                # Create a client
+                client = contentwarehouse_v1.DocumentLinkServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.CreateDocumentLinkRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = client.create_document_link(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.CreateDocumentLinkRequest, dict]):
+                The request object. Request message for
+                DocumentLinkService.CreateDocumentLink.
+            parent (str):
+                Required. Parent of the document-link to be created.
+                parent of document-link should be a document. Format:
+                projects/{project_number}/locations/{location}/documents/{source_document_id}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document_link (google.cloud.contentwarehouse_v1.types.DocumentLink):
+                Required. Document links associated with the source
+                documents (source_document_id).
+
+                This corresponds to the ``document_link`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.DocumentLink:
+                A document-link between source and
+                target document.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, document_link])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_link_service.CreateDocumentLinkRequest):
+            request = document_link_service.CreateDocumentLinkRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if document_link is not None:
+                request.document_link = document_link
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_document_link]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_document_link(self,
+            request: Optional[Union[document_link_service.DeleteDocumentLinkRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Remove the link between the source and target
+        documents.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_delete_document_link():
+                # Create a client
+                client = contentwarehouse_v1.DocumentLinkServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteDocumentLinkRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_document_link(request=request)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.DeleteDocumentLinkRequest, dict]):
+                The request object. Request message for
+                DocumentLinkService.DeleteDocumentLink.
+            name (str):
+                Required. The name of the document-link to be deleted.
+                Format:
+                projects/{project_number}/locations/{location}/documents/{source_document_id}/documentLinks/{document_link_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_link_service.DeleteDocumentLinkRequest):
+            request = document_link_service.DeleteDocumentLinkRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_document_link]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
    def __enter__(self) -> "DocumentLinkServiceClient":
        # Support `with client:` usage; the client itself is the managed
        # resource (its transport is closed in __exit__).
        return self
+
    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        # Exception details (type/value/traceback) are ignored: any
        # exception propagates unchanged after the transport is closed.
        self.transport.close()
+
+    def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+
+
+
+
+
+
+
# Default client info: reports this package's version in the
# ``x-goog-api-client`` user-agent metadata attached to every request.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


# Public surface of this module.
__all__ = (
    "DocumentLinkServiceClient",
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/pagers.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/pagers.py
new file mode 100644
index 000000000000..baa927fdbbe7
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/pagers.py
@@ -0,0 +1,162 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_link_service
+
+
class ListLinkedSourcesPager:
    """A pager for iterating through ``list_linked_sources`` requests.

    Thinly wraps an initial
    :class:`google.cloud.contentwarehouse_v1.types.ListLinkedSourcesResponse`
    and provides ``__iter__`` over its ``document_links`` field. Further
    ``ListLinkedSources`` requests are issued lazily for as long as the
    service keeps returning a ``next_page_token``.

    All the usual :class:`google.cloud.contentwarehouse_v1.types.ListLinkedSourcesResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., document_link_service.ListLinkedSourcesResponse],
            request: document_link_service.ListLinkedSourcesRequest,
            response: document_link_service.ListLinkedSourcesResponse,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The client method that was originally called
                and which is re-invoked to fetch subsequent pages.
            request (google.cloud.contentwarehouse_v1.types.ListLinkedSourcesRequest):
                The initial request object.
            response (google.cloud.contentwarehouse_v1.types.ListLinkedSourcesResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap the request so page_token can be mutated without touching
        # the caller's object.
        self._request = document_link_service.ListLinkedSourcesRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[document_link_service.ListLinkedSourcesResponse]:
        # Yield the page already in hand, then keep fetching while the
        # service reports that more pages exist.
        while True:
            yield self._response
            token = self._response.next_page_token
            if not token:
                break
            self._request.page_token = token
            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)

    def __iter__(self) -> Iterator[document_link_service.DocumentLink]:
        # Flatten every page's document_links into one stream.
        return (link for page in self.pages for link in page.document_links)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
+
+
class ListLinkedSourcesAsyncPager:
    """A pager for iterating through ``list_linked_sources`` requests.

    Thinly wraps an initial
    :class:`google.cloud.contentwarehouse_v1.types.ListLinkedSourcesResponse`
    and provides ``__aiter__`` over its ``document_links`` field. Further
    ``ListLinkedSources`` requests are issued lazily for as long as the
    service keeps returning a ``next_page_token``.

    All the usual :class:`google.cloud.contentwarehouse_v1.types.ListLinkedSourcesResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., Awaitable[document_link_service.ListLinkedSourcesResponse]],
            request: document_link_service.ListLinkedSourcesRequest,
            response: document_link_service.ListLinkedSourcesResponse,
            *,
            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The client method that was originally called
                and which is awaited again to fetch subsequent pages.
            request (google.cloud.contentwarehouse_v1.types.ListLinkedSourcesRequest):
                The initial request object.
            response (google.cloud.contentwarehouse_v1.types.ListLinkedSourcesResponse):
                The initial response object.
            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Re-wrap the request so page_token can be mutated without touching
        # the caller's object.
        self._request = document_link_service.ListLinkedSourcesRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[document_link_service.ListLinkedSourcesResponse]:
        # Yield the page already in hand, then keep awaiting new pages
        # while the service reports that more exist.
        while True:
            yield self._response
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)

    def __aiter__(self) -> AsyncIterator[document_link_service.DocumentLink]:
        async def _flatten():
            # Flatten every page's document_links into one async stream.
            async for page in self.pages:
                for link in page.document_links:
                    yield link

        return _flatten()

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/README.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/README.rst
new file mode 100644
index 000000000000..4e3d9ba6d450
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`DocumentLinkServiceTransport` is the ABC for all transports.
+- public child `DocumentLinkServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `DocumentLinkServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseDocumentLinkServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `DocumentLinkServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/__init__.py
new file mode 100644
index 000000000000..0f7c2c0b2806
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/__init__.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import DocumentLinkServiceTransport
+from .grpc import DocumentLinkServiceGrpcTransport
+from .grpc_asyncio import DocumentLinkServiceGrpcAsyncIOTransport
+from .rest import DocumentLinkServiceRestTransport
+from .rest import DocumentLinkServiceRestInterceptor
+
+
# Registry mapping transport names to their implementing classes,
# preserving declaration order (grpc, grpc_asyncio, rest).
_transport_registry: Dict[str, Type[DocumentLinkServiceTransport]] = OrderedDict(
    (
        ('grpc', DocumentLinkServiceGrpcTransport),
        ('grpc_asyncio', DocumentLinkServiceGrpcAsyncIOTransport),
        ('rest', DocumentLinkServiceRestTransport),
    )
)

__all__ = (
    'DocumentLinkServiceTransport',
    'DocumentLinkServiceGrpcTransport',
    'DocumentLinkServiceGrpcAsyncIOTransport',
    'DocumentLinkServiceRestTransport',
    'DocumentLinkServiceRestInterceptor',
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py
new file mode 100644
index 000000000000..0659695f7289
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/base.py
@@ -0,0 +1,212 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_link_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
class DocumentLinkServiceTransport(abc.ABC):
    """Abstract transport class for DocumentLinkService.

    Holds the configuration (credentials, host, scopes) shared by every
    concrete transport; subclasses implement the RPC properties below.
    """

    # OAuth scopes requested when falling back to default credentials.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
    )

    # Default API endpoint; ':443' is appended in __init__ if no port is given.
    DEFAULT_HOST: str = 'contentwarehouse.googleapis.com'
    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The audience applied to GDC-H
                credentials; falls back to ``host`` when unset.
        """

        # Explicit scopes take precedence; AUTH_SCOPES is the fallback.
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes
        # NOTE(review): subclasses appear to set _ignore_credentials before
        # calling super().__init__ (e.g. when a pre-built channel is
        # supplied) — confirm against the concrete transports.
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            # Load from the file; scopes/quota project applied at load time.
            credentials, _ = google.auth.load_credentials_from_file(
                                credentials_file,
                                **scopes_kwargs,
                                quota_project_id=quota_project_id
                            )
        elif credentials is None and not self._ignore_credentials:
            # Fall back to Application Default Credentials.
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    @property
    def host(self):
        # Hostname including port, e.g. 'contentwarehouse.googleapis.com:443'.
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Wrapping attaches the default timeout and user-agent metadata to
        # each RPC. The 180.0s defaults presumably come from the service's
        # published client config — confirm before changing.
        self._wrapped_methods = {
            self.list_linked_targets: gapic_v1.method.wrap_method(
                self.list_linked_targets,
                default_timeout=180.0,
                client_info=client_info,
            ),
            self.list_linked_sources: gapic_v1.method.wrap_method(
                self.list_linked_sources,
                default_timeout=180.0,
                client_info=client_info,
            ),
            self.create_document_link: gapic_v1.method.wrap_method(
                self.create_document_link,
                default_timeout=180.0,
                client_info=client_info,
            ),
            self.delete_document_link: gapic_v1.method.wrap_method(
                self.delete_document_link,
                default_timeout=180.0,
                client_info=client_info,
            ),
            self.get_operation: gapic_v1.method.wrap_method(
                self.get_operation,
                default_timeout=None,
                client_info=client_info,
            ),
         }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    # Each RPC below is provided by the concrete transports; the Union
    # return type covers both the sync and async implementations.
    @property
    def list_linked_targets(self) -> Callable[
            [document_link_service.ListLinkedTargetsRequest],
            Union[
                document_link_service.ListLinkedTargetsResponse,
                Awaitable[document_link_service.ListLinkedTargetsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def list_linked_sources(self) -> Callable[
            [document_link_service.ListLinkedSourcesRequest],
            Union[
                document_link_service.ListLinkedSourcesResponse,
                Awaitable[document_link_service.ListLinkedSourcesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def create_document_link(self) -> Callable[
            [document_link_service.CreateDocumentLinkRequest],
            Union[
                document_link_service.DocumentLink,
                Awaitable[document_link_service.DocumentLink]
            ]]:
        raise NotImplementedError()

    @property
    def delete_document_link(self) -> Callable[
            [document_link_service.DeleteDocumentLinkRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier, e.g. 'grpc' or 'rest'.
        raise NotImplementedError()
+
+
# Public surface of this module.
__all__ = (
    'DocumentLinkServiceTransport',
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py
new file mode 100644
index 000000000000..0eb4a5793212
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc.py
@@ -0,0 +1,372 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth                         # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_link_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DocumentLinkServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class DocumentLinkServiceGrpcTransport(DocumentLinkServiceTransport):
+    """gRPC backend transport for DocumentLinkService.
+
+    This service lets you manage document-links.
+    Document-Links are treated as sub-resources under source
+    documents.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    # Cache of RPC stub callables, keyed by method name; populated lazily
+    # by the stub properties below.
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            api_audience (Optional[str]): Optional audience override,
+                forwarded unchanged to the base transport.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, grpc.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def list_linked_targets(self) -> Callable[
+            [document_link_service.ListLinkedTargetsRequest],
+            document_link_service.ListLinkedTargetsResponse]:
+        r"""Return a callable for the list linked targets method over gRPC.
+
+        Return all target document-links from the document.
+
+        Returns:
+            Callable[[~.ListLinkedTargetsRequest],
+                    ~.ListLinkedTargetsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_linked_targets' not in self._stubs:
+            self._stubs['list_linked_targets'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentLinkService/ListLinkedTargets',
+                request_serializer=document_link_service.ListLinkedTargetsRequest.serialize,
+                response_deserializer=document_link_service.ListLinkedTargetsResponse.deserialize,
+            )
+        return self._stubs['list_linked_targets']
+
+    @property
+    def list_linked_sources(self) -> Callable[
+            [document_link_service.ListLinkedSourcesRequest],
+            document_link_service.ListLinkedSourcesResponse]:
+        r"""Return a callable for the list linked sources method over gRPC.
+
+        Return all source document-links from the document.
+
+        Returns:
+            Callable[[~.ListLinkedSourcesRequest],
+                    ~.ListLinkedSourcesResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_linked_sources' not in self._stubs:
+            self._stubs['list_linked_sources'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentLinkService/ListLinkedSources',
+                request_serializer=document_link_service.ListLinkedSourcesRequest.serialize,
+                response_deserializer=document_link_service.ListLinkedSourcesResponse.deserialize,
+            )
+        return self._stubs['list_linked_sources']
+
+    @property
+    def create_document_link(self) -> Callable[
+            [document_link_service.CreateDocumentLinkRequest],
+            document_link_service.DocumentLink]:
+        r"""Return a callable for the create document link method over gRPC.
+
+        Create a link between a source document and a target
+        document.
+
+        Returns:
+            Callable[[~.CreateDocumentLinkRequest],
+                    ~.DocumentLink]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_document_link' not in self._stubs:
+            self._stubs['create_document_link'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentLinkService/CreateDocumentLink',
+                request_serializer=document_link_service.CreateDocumentLinkRequest.serialize,
+                response_deserializer=document_link_service.DocumentLink.deserialize,
+            )
+        return self._stubs['create_document_link']
+
+    @property
+    def delete_document_link(self) -> Callable[
+            [document_link_service.DeleteDocumentLinkRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete document link method over gRPC.
+
+        Remove the link between the source and target
+        documents.
+
+        Returns:
+            Callable[[~.DeleteDocumentLinkRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_document_link' not in self._stubs:
+            self._stubs['delete_document_link'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentLinkService/DeleteDocumentLink',
+                request_serializer=document_link_service.DeleteDocumentLinkRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_document_link']
+
+    def close(self):
+        """Close the underlying gRPC channel."""
+        self.grpc_channel.close()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def kind(self) -> str:
+        """Return the transport kind identifier, ``"grpc"``."""
+        return "grpc"
+
+
+# Public surface of this module: only the synchronous gRPC transport.
+__all__ = (
+    'DocumentLinkServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..bf5a72de37c2
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/grpc_asyncio.py
@@ -0,0 +1,413 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc                        # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_link_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DocumentLinkServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import DocumentLinkServiceGrpcTransport
+
+
+class DocumentLinkServiceGrpcAsyncIOTransport(DocumentLinkServiceTransport):
+    """gRPC AsyncIO backend transport for DocumentLinkService.
+
+    This service lets you manage document-links.
+    Document-Links are treated as sub-resources under source
+    documents.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    # The live AsyncIO channel; assigned in ``__init__`` (either the
+    # user-supplied channel or one built via ``create_channel``).
+    _grpc_channel: aio.Channel
+    # Class-level default is shadowed by the per-instance dict created in
+    # ``__init__``; stubs are cached here keyed by method name.
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            api_audience (Optional[str]): Optional audience override,
+                forwarded unchanged to the base transport.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Return the channel designed to connect to this service.
+
+        The channel is created once during ``__init__``; this property
+        only returns that cached instance.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def list_linked_targets(self) -> Callable[
+            [document_link_service.ListLinkedTargetsRequest],
+            Awaitable[document_link_service.ListLinkedTargetsResponse]]:
+        r"""Return a callable for the list linked targets method over gRPC.
+
+        Return all target document-links from the document.
+
+        Returns:
+            Callable[[~.ListLinkedTargetsRequest],
+                    Awaitable[~.ListLinkedTargetsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs so it is only created once.
+        if 'list_linked_targets' not in self._stubs:
+            self._stubs['list_linked_targets'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentLinkService/ListLinkedTargets',
+                request_serializer=document_link_service.ListLinkedTargetsRequest.serialize,
+                response_deserializer=document_link_service.ListLinkedTargetsResponse.deserialize,
+            )
+        return self._stubs['list_linked_targets']
+
+    @property
+    def list_linked_sources(self) -> Callable[
+            [document_link_service.ListLinkedSourcesRequest],
+            Awaitable[document_link_service.ListLinkedSourcesResponse]]:
+        r"""Return a callable for the list linked sources method over gRPC.
+
+        Return all source document-links from the document.
+
+        Returns:
+            Callable[[~.ListLinkedSourcesRequest],
+                    Awaitable[~.ListLinkedSourcesResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs so it is only created once.
+        if 'list_linked_sources' not in self._stubs:
+            self._stubs['list_linked_sources'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentLinkService/ListLinkedSources',
+                request_serializer=document_link_service.ListLinkedSourcesRequest.serialize,
+                response_deserializer=document_link_service.ListLinkedSourcesResponse.deserialize,
+            )
+        return self._stubs['list_linked_sources']
+
+    @property
+    def create_document_link(self) -> Callable[
+            [document_link_service.CreateDocumentLinkRequest],
+            Awaitable[document_link_service.DocumentLink]]:
+        r"""Return a callable for the create document link method over gRPC.
+
+        Create a link between a source document and a target
+        document.
+
+        Returns:
+            Callable[[~.CreateDocumentLinkRequest],
+                    Awaitable[~.DocumentLink]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.  The stub is cached in
+        # self._stubs after first creation.
+        if 'create_document_link' not in self._stubs:
+            self._stubs['create_document_link'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentLinkService/CreateDocumentLink',
+                request_serializer=document_link_service.CreateDocumentLinkRequest.serialize,
+                response_deserializer=document_link_service.DocumentLink.deserialize,
+            )
+        return self._stubs['create_document_link']
+
+    @property
+    def delete_document_link(self) -> Callable[
+            [document_link_service.DeleteDocumentLinkRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete document link method over gRPC.
+
+        Remove the link between the source and target
+        documents.
+
+        Returns:
+            Callable[[~.DeleteDocumentLinkRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.  Note the response is a raw
+        # protobuf Empty, so FromString is used instead of a proto-plus
+        # deserialize helper.
+        if 'delete_document_link' not in self._stubs:
+            self._stubs['delete_document_link'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentLinkService/DeleteDocumentLink',
+                request_serializer=document_link_service.DeleteDocumentLinkRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_document_link']
+
+    def _prep_wrapped_messages(self, client_info):
+        """Precompute the wrapped methods, overriding the base class method to use async wrappers.
+
+        Each RPC stub is wrapped with its default timeout and the supplied
+        ``client_info`` (used for user-agent metadata).
+        """
+        self._wrapped_methods = {
+            self.list_linked_targets: self._wrap_method(
+                self.list_linked_targets,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.list_linked_sources: self._wrap_method(
+                self.list_linked_sources,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.create_document_link: self._wrap_method(
+                self.create_document_link,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.delete_document_link: self._wrap_method(
+                self.delete_document_link,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            # get_operation has no default timeout configured.
+            self.get_operation: self._wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def _wrap_method(self, func, *args, **kwargs):
+        """Wrap ``func`` with gapic_v1 async method wrapping, passing ``kind``
+        only when the installed api-core version supports that parameter
+        (detected once in the constructor via ``self._wrap_with_kind``)."""
+        if self._wrap_with_kind:  # pragma: NO COVER
+            kwargs["kind"] = self.kind
+        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
+    def close(self):
+        """Close the underlying gRPC channel (returns the channel's close coroutine)."""
+        return self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        # Transport kind identifier used by api-core method wrapping/metrics.
+        return "grpc_asyncio"
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.  This targets the standard
+        # google.longrunning.Operations mixin service, not the
+        # DocumentLinkService itself.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+
+# Public API of this module: only the async gRPC transport class.
+__all__ = (
+    'DocumentLinkServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py
new file mode 100644
index 000000000000..3621c5b1aff6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py
@@ -0,0 +1,679 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+import json  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+
+from requests import __version__ as requests_version
+import dataclasses
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+
+from google.cloud.contentwarehouse_v1.types import document_link_service
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+from .rest_base import _BaseDocumentLinkServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+# Older versions of google-api-core lack gapic_v1.method._MethodDefault;
+# fall back to a plain ``object`` sentinel in the retry type alias.
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+
+# Client info advertised in the user-agent for REST calls; records the
+# installed ``requests`` version instead of a gRPC version.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=f"requests@{requests_version}",
+)
+
+
+class DocumentLinkServiceRestInterceptor:
+    """Interceptor for DocumentLinkService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the DocumentLinkServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomDocumentLinkServiceInterceptor(DocumentLinkServiceRestInterceptor):
+            def pre_create_document_link(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_document_link(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_delete_document_link(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def pre_list_linked_sources(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_linked_sources(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_list_linked_targets(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_linked_targets(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = DocumentLinkServiceRestTransport(interceptor=MyCustomDocumentLinkServiceInterceptor())
+        client = DocumentLinkServiceClient(transport=transport)
+
+
+    """
+    # All hooks below are identity functions by default; subclasses override
+    # the ones they care about.  delete_document_link has no post-hook because
+    # the RPC returns Empty.
+    def pre_create_document_link(self, request: document_link_service.CreateDocumentLinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_link_service.CreateDocumentLinkRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_document_link
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentLinkService server.
+        """
+        return request, metadata
+
+    def post_create_document_link(self, response: document_link_service.DocumentLink) -> document_link_service.DocumentLink:
+        """Post-rpc interceptor for create_document_link
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentLinkService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_document_link(self, request: document_link_service.DeleteDocumentLinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_link_service.DeleteDocumentLinkRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_document_link
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentLinkService server.
+        """
+        return request, metadata
+
+    def pre_list_linked_sources(self, request: document_link_service.ListLinkedSourcesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_link_service.ListLinkedSourcesRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_linked_sources
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentLinkService server.
+        """
+        return request, metadata
+
+    def post_list_linked_sources(self, response: document_link_service.ListLinkedSourcesResponse) -> document_link_service.ListLinkedSourcesResponse:
+        """Post-rpc interceptor for list_linked_sources
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentLinkService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_list_linked_targets(self, request: document_link_service.ListLinkedTargetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_link_service.ListLinkedTargetsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_linked_targets
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentLinkService server.
+        """
+        return request, metadata
+
+    def post_list_linked_targets(self, response: document_link_service.ListLinkedTargetsResponse) -> document_link_service.ListLinkedTargetsResponse:
+        """Post-rpc interceptor for list_linked_targets
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentLinkService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_get_operation(
+        self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentLinkService server.
+        """
+        return request, metadata
+
+    def post_get_operation(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentLinkService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class DocumentLinkServiceRestStub:
+    """Shared state mixed into each per-RPC stub class below."""
+    _session: AuthorizedSession       # authorized HTTP session used to send requests
+    _host: str                        # API host prefix, e.g. "https://contentwarehouse.googleapis.com"
+    _interceptor: DocumentLinkServiceRestInterceptor  # pre/post RPC hooks
+
+
+class DocumentLinkServiceRestTransport(_BaseDocumentLinkServiceRestTransport):
+    """REST backend synchronous transport for DocumentLinkService.
+
+    This service lets you manage document-links.
+    Document-Links are treated as sub-resources under source
+    documents.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[DocumentLinkServiceRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        # All REST calls share one authorized HTTP session.
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        # Fall back to the no-op interceptor when none was supplied.
+        self._interceptor = interceptor or DocumentLinkServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateDocumentLink(_BaseDocumentLinkServiceRestTransport._BaseCreateDocumentLink, DocumentLinkServiceRestStub):
+        def __hash__(self):
+            # Stable hash so the callable can be used as a dict key in
+            # _wrapped_methods.
+            return hash("DocumentLinkServiceRestTransport.CreateDocumentLink")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Issue the HTTP request described by the transcoded request
+            # (uri + method) over the authorized session and return the raw
+            # requests.Response.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_link_service.CreateDocumentLinkRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_link_service.DocumentLink:
+            r"""Call the create document link method over HTTP.
+
+            Args:
+                request (~.document_link_service.CreateDocumentLinkRequest):
+                    The request object. Request message for
+                DocumentLinkService.CreateDocumentLink.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_link_service.DocumentLink:
+                    A document-link between source and
+                target document.
+
+            """
+
+            http_options = _BaseDocumentLinkServiceRestTransport._BaseCreateDocumentLink._get_http_options()
+            # Interceptor hook runs before transcoding so it sees the proto request.
+            request, metadata = self._interceptor.pre_create_document_link(request, metadata)
+            transcoded_request = _BaseDocumentLinkServiceRestTransport._BaseCreateDocumentLink._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentLinkServiceRestTransport._BaseCreateDocumentLink._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentLinkServiceRestTransport._BaseCreateDocumentLink._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentLinkServiceRestTransport._CreateDocumentLink._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_link_service.DocumentLink()
+            pb_resp = document_link_service.DocumentLink.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_create_document_link(resp)
+            return resp
+
+    class _DeleteDocumentLink(_BaseDocumentLinkServiceRestTransport._BaseDeleteDocumentLink, DocumentLinkServiceRestStub):
+        def __hash__(self):
+            # Stable hash so the callable can be used as a dict key in
+            # _wrapped_methods.
+            return hash("DocumentLinkServiceRestTransport.DeleteDocumentLink")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Issue the HTTP request described by the transcoded request and
+            # return the raw requests.Response.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_link_service.DeleteDocumentLinkRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ):
+            r"""Call the delete document link method over HTTP.
+
+            Args:
+                request (~.document_link_service.DeleteDocumentLinkRequest):
+                    The request object. Request message for
+                DocumentLinkService.DeleteDocumentLink.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+            """
+
+            http_options = _BaseDocumentLinkServiceRestTransport._BaseDeleteDocumentLink._get_http_options()
+            request, metadata = self._interceptor.pre_delete_document_link(request, metadata)
+            transcoded_request = _BaseDocumentLinkServiceRestTransport._BaseDeleteDocumentLink._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentLinkServiceRestTransport._BaseDeleteDocumentLink._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentLinkServiceRestTransport._BaseDeleteDocumentLink._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentLinkServiceRestTransport._DeleteDocumentLink._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.  On success the RPC returns Empty, so nothing is
+            # parsed or returned.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+    class _ListLinkedSources(_BaseDocumentLinkServiceRestTransport._BaseListLinkedSources, DocumentLinkServiceRestStub):
+        def __hash__(self):
+            # Stable hash so the callable can be used as a dict key in
+            # _wrapped_methods.
+            return hash("DocumentLinkServiceRestTransport.ListLinkedSources")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Issue the HTTP request described by the transcoded request and
+            # return the raw requests.Response.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_link_service.ListLinkedSourcesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_link_service.ListLinkedSourcesResponse:
+            r"""Call the list linked sources method over HTTP.
+
+            Args:
+                request (~.document_link_service.ListLinkedSourcesRequest):
+                    The request object. Request message for
+                DocumentLinkService.ListLinkedSources.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_link_service.ListLinkedSourcesResponse:
+                    Response message for
+                DocumentLinkService.ListLinkedSources.
+
+            """
+
+            http_options = _BaseDocumentLinkServiceRestTransport._BaseListLinkedSources._get_http_options()
+            request, metadata = self._interceptor.pre_list_linked_sources(request, metadata)
+            transcoded_request = _BaseDocumentLinkServiceRestTransport._BaseListLinkedSources._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentLinkServiceRestTransport._BaseListLinkedSources._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentLinkServiceRestTransport._BaseListLinkedSources._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentLinkServiceRestTransport._ListLinkedSources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_link_service.ListLinkedSourcesResponse()
+            pb_resp = document_link_service.ListLinkedSourcesResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_list_linked_sources(resp)
+            return resp
+
+    class _ListLinkedTargets(_BaseDocumentLinkServiceRestTransport._BaseListLinkedTargets, DocumentLinkServiceRestStub):
+        def __hash__(self):
+            # Stable hash so the callable can be used as a dict key in
+            # _wrapped_methods.
+            return hash("DocumentLinkServiceRestTransport.ListLinkedTargets")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Issue the HTTP request described by the transcoded request and
+            # return the raw requests.Response.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_link_service.ListLinkedTargetsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_link_service.ListLinkedTargetsResponse:
+            r"""Call the list linked targets method over HTTP.
+
+            Args:
+                request (~.document_link_service.ListLinkedTargetsRequest):
+                    The request object. Request message for
+                DocumentLinkService.ListLinkedTargets.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_link_service.ListLinkedTargetsResponse:
+                    Response message for
+                DocumentLinkService.ListLinkedTargets.
+
+            """
+
+            http_options = _BaseDocumentLinkServiceRestTransport._BaseListLinkedTargets._get_http_options()
+            request, metadata = self._interceptor.pre_list_linked_targets(request, metadata)
+            transcoded_request = _BaseDocumentLinkServiceRestTransport._BaseListLinkedTargets._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentLinkServiceRestTransport._BaseListLinkedTargets._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentLinkServiceRestTransport._BaseListLinkedTargets._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentLinkServiceRestTransport._ListLinkedTargets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_link_service.ListLinkedTargetsResponse()
+            pb_resp = document_link_service.ListLinkedTargetsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_list_linked_targets(resp)
+            return resp
+
+    @property
+    def create_document_link(self) -> Callable[
+            [document_link_service.CreateDocumentLinkRequest],
+            document_link_service.DocumentLink]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._CreateDocumentLink(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def delete_document_link(self) -> Callable[
+            [document_link_service.DeleteDocumentLinkRequest],
+            empty_pb2.Empty]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._DeleteDocumentLink(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def list_linked_sources(self) -> Callable[
+            [document_link_service.ListLinkedSourcesRequest],
+            document_link_service.ListLinkedSourcesResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._ListLinkedSources(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def list_linked_targets(self) -> Callable[
+            [document_link_service.ListLinkedTargetsRequest],
+            document_link_service.ListLinkedTargetsResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._ListLinkedTargets(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_operation(self):
+        return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
+
+    class _GetOperation(_BaseDocumentLinkServiceRestTransport._BaseGetOperation, DocumentLinkServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentLinkServiceRestTransport.GetOperation")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+            request: operations_pb2.GetOperationRequest, *,
+            retry: OptionalRetry=gapic_v1.method.DEFAULT,
+            timeout: Optional[float]=None,
+            metadata: Sequence[Tuple[str, str]]=(),
+            ) -> operations_pb2.Operation:
+
+            r"""Call the get operation method over HTTP.
+
+            Args:
+                request (operations_pb2.GetOperationRequest):
+                    The request object for GetOperation method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                operations_pb2.Operation: Response from GetOperation method.
+            """
+
+            http_options = _BaseDocumentLinkServiceRestTransport._BaseGetOperation._get_http_options()
+            request, metadata = self._interceptor.pre_get_operation(request, metadata)
+            transcoded_request = _BaseDocumentLinkServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentLinkServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentLinkServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            content = response.content.decode("utf-8")
+            resp = operations_pb2.Operation()
+            resp = json_format.Parse(content, resp)
+            resp = self._interceptor.post_get_operation(resp)
+            return resp
+
+    @property
+    def kind(self) -> str:
+        return "rest"
+
+    def close(self):
+        self._session.close()
+
+
+__all__=(
+    'DocumentLinkServiceRestTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest_base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest_base.py
new file mode 100644
index 000000000000..80c668c60bbb
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest_base.py
@@ -0,0 +1,306 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json  # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import DocumentLinkServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.contentwarehouse_v1.types import document_link_service
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+class _BaseDocumentLinkServiceRestTransport(DocumentLinkServiceTransport):
+    """Base REST backend transport for DocumentLinkService.
+
+    Note: This class is not meant to be used directly. Use its sync and
+    async sub-classes instead.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseCreateDocumentLink:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*/locations/*/documents/*}/documentLinks',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_link_service.CreateDocumentLinkRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentLinkServiceRestTransport._BaseCreateDocumentLink._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseDeleteDocumentLink:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{name=projects/*/locations/*/documents/*/documentLinks/*}:delete',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_link_service.DeleteDocumentLinkRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentLinkServiceRestTransport._BaseDeleteDocumentLink._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseListLinkedSources:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*/locations/*/documents/*}/linkedSources',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_link_service.ListLinkedSourcesRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentLinkServiceRestTransport._BaseListLinkedSources._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseListLinkedTargets:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*/locations/*/documents/*}/linkedTargets',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_link_service.ListLinkedTargetsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentLinkServiceRestTransport._BaseListLinkedTargets._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseGetOperation:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            request_kwargs = json_format.MessageToDict(request)
+            transcoded_request = path_template.transcode(
+                http_options, **request_kwargs)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            return query_params
+
+
+__all__=(
+    '_BaseDocumentLinkServiceRestTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/__init__.py
new file mode 100644
index 000000000000..f057810d00db
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import DocumentSchemaServiceClient
+from .async_client import DocumentSchemaServiceAsyncClient
+
+__all__ = (
+    'DocumentSchemaServiceClient',
+    'DocumentSchemaServiceAsyncClient',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py
new file mode 100644
index 000000000000..52bc4f37d8aa
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/async_client.py
@@ -0,0 +1,849 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.oauth2 import service_account              # type: ignore
+
+
+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.document_schema_service import pagers
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DocumentSchemaServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import DocumentSchemaServiceGrpcAsyncIOTransport
+from .client import DocumentSchemaServiceClient
+
+
+class DocumentSchemaServiceAsyncClient:
+    """This service lets you manage document schema."""
+
+    _client: DocumentSchemaServiceClient
+
+    # Copy defaults from the synchronous client for use here.
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = DocumentSchemaServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = DocumentSchemaServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = DocumentSchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = DocumentSchemaServiceClient._DEFAULT_UNIVERSE
+
+    document_schema_path = staticmethod(DocumentSchemaServiceClient.document_schema_path)
+    parse_document_schema_path = staticmethod(DocumentSchemaServiceClient.parse_document_schema_path)
+    location_path = staticmethod(DocumentSchemaServiceClient.location_path)
+    parse_location_path = staticmethod(DocumentSchemaServiceClient.parse_location_path)
+    common_billing_account_path = staticmethod(DocumentSchemaServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(DocumentSchemaServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(DocumentSchemaServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(DocumentSchemaServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(DocumentSchemaServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(DocumentSchemaServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(DocumentSchemaServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(DocumentSchemaServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(DocumentSchemaServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(DocumentSchemaServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentSchemaServiceAsyncClient: The constructed client.
+        """
+        return DocumentSchemaServiceClient.from_service_account_info.__func__(DocumentSchemaServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentSchemaServiceAsyncClient: The constructed client.
+        """
+        return DocumentSchemaServiceClient.from_service_account_file.__func__(DocumentSchemaServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DocumentSchemaServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DocumentSchemaServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DocumentSchemaServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = DocumentSchemaServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DocumentSchemaServiceTransport, Callable[..., DocumentSchemaServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the document schema service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DocumentSchemaServiceTransport,Callable[..., DocumentSchemaServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DocumentSchemaServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which has one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = DocumentSchemaServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+    async def create_document_schema(self,
+            request: Optional[Union[document_schema_service.CreateDocumentSchemaRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            document_schema: Optional[gcc_document_schema.DocumentSchema] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> gcc_document_schema.DocumentSchema:
+        r"""Creates a document schema.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_create_document_schema():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document_schema = contentwarehouse_v1.DocumentSchema()
+                document_schema.display_name = "display_name_value"
+
+                request = contentwarehouse_v1.CreateDocumentSchemaRequest(
+                    parent="parent_value",
+                    document_schema=document_schema,
+                )
+
+                # Make the request
+                response = await client.create_document_schema(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.CreateDocumentSchemaRequest, dict]]):
+                The request object. Request message for
+                DocumentSchemaService.CreateDocumentSchema.
+            parent (:class:`str`):
+                Required. The parent name.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document_schema (:class:`google.cloud.contentwarehouse_v1.types.DocumentSchema`):
+                Required. The document schema to
+                create.
+
+                This corresponds to the ``document_schema`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.DocumentSchema:
+                A document schema used to define
+                document structure.
+
+        Raises:
+            ValueError: If ``request`` is provided together with any of the
+                flattened fields (``parent``, ``document_schema``).
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, document_schema])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.CreateDocumentSchemaRequest):
+            request = document_schema_service.CreateDocumentSchemaRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if document_schema is not None:
+            request.document_schema = document_schema
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_document_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_document_schema(self,
+            request: Optional[Union[document_schema_service.UpdateDocumentSchemaRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            document_schema: Optional[gcc_document_schema.DocumentSchema] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> gcc_document_schema.DocumentSchema:
+        r"""Updates a Document Schema. Returns INVALID_ARGUMENT if the name
+        of the Document Schema is non-empty and does not equal the
+        existing name. Supports only appending new properties, adding
+        new ENUM possible values, and updating the
+        [EnumTypeOptions.validation_check_disabled][google.cloud.contentwarehouse.v1.EnumTypeOptions.validation_check_disabled]
+        flag for ENUM possible values. Updating existing properties will
+        result into INVALID_ARGUMENT.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_update_document_schema():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document_schema = contentwarehouse_v1.DocumentSchema()
+                document_schema.display_name = "display_name_value"
+
+                request = contentwarehouse_v1.UpdateDocumentSchemaRequest(
+                    name="name_value",
+                    document_schema=document_schema,
+                )
+
+                # Make the request
+                response = await client.update_document_schema(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.UpdateDocumentSchemaRequest, dict]]):
+                The request object. Request message for
+                DocumentSchemaService.UpdateDocumentSchema.
+            name (:class:`str`):
+                Required. The name of the document schema to update.
+                Format:
+                projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document_schema (:class:`google.cloud.contentwarehouse_v1.types.DocumentSchema`):
+                Required. The document schema to
+                update with.
+
+                This corresponds to the ``document_schema`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.DocumentSchema:
+                A document schema used to define
+                document structure.
+
+        Raises:
+            ValueError: If ``request`` is provided together with any of the
+                flattened fields (``name``, ``document_schema``).
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, document_schema])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.UpdateDocumentSchemaRequest):
+            request = document_schema_service.UpdateDocumentSchemaRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+        if document_schema is not None:
+            request.document_schema = document_schema
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.update_document_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_document_schema(self,
+            request: Optional[Union[document_schema_service.GetDocumentSchemaRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_schema.DocumentSchema:
+        r"""Gets a document schema. Returns NOT_FOUND if the document schema
+        does not exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_get_document_schema():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.GetDocumentSchemaRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.get_document_schema(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.GetDocumentSchemaRequest, dict]]):
+                The request object. Request message for
+                DocumentSchemaService.GetDocumentSchema.
+            name (:class:`str`):
+                Required. The name of the document
+                schema to retrieve.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.DocumentSchema:
+                A document schema used to define
+                document structure.
+
+        Raises:
+            ValueError: If ``request`` is provided together with the
+                flattened ``name`` field.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.GetDocumentSchemaRequest):
+            request = document_schema_service.GetDocumentSchemaRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_document_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_document_schema(self,
+            request: Optional[Union[document_schema_service.DeleteDocumentSchemaRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a document schema. Returns NOT_FOUND if the document
+        schema does not exist. Returns BAD_REQUEST if the document
+        schema has documents depending on it.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_delete_document_schema():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteDocumentSchemaRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.delete_document_schema(request=request)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.DeleteDocumentSchemaRequest, dict]]):
+                The request object. Request message for
+                DocumentSchemaService.DeleteDocumentSchema.
+            name (:class:`str`):
+                Required. The name of the document
+                schema to delete.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Raises:
+            ValueError: If ``request`` is provided together with the
+                flattened ``name`` field.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.DeleteDocumentSchemaRequest):
+            request = document_schema_service.DeleteDocumentSchemaRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_document_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.  This RPC has no return value.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def list_document_schemas(self,
+            request: Optional[Union[document_schema_service.ListDocumentSchemasRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListDocumentSchemasAsyncPager:
+        r"""Lists document schemas.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_list_document_schemas():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListDocumentSchemasRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_document_schemas(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.ListDocumentSchemasRequest, dict]]):
+                The request object. Request message for
+                DocumentSchemaService.ListDocumentSchemas.
+            parent (:class:`str`):
+                Required. The parent, which owns this collection of
+                document schemas. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.document_schema_service.pagers.ListDocumentSchemasAsyncPager:
+                Response message for
+                DocumentSchemaService.ListDocumentSchemas.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        Raises:
+            ValueError: If ``request`` is provided together with the
+                flattened ``parent`` field.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.ListDocumentSchemasRequest):
+            request = document_schema_service.ListDocumentSchemasRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_document_schemas]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListDocumentSchemasAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        # NOTE(review): unlike the service RPCs above, this looks up the
+        # wrapped method via ``self.transport`` rather than
+        # ``self._client._transport`` — presumably both resolve to the same
+        # transport instance; confirm against the ``transport`` property.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def __aenter__(self) -> "DocumentSchemaServiceAsyncClient":
+        # Support ``async with`` usage; yields the client itself.
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        # On ``async with`` exit, close the underlying transport channel.
+        await self.transport.close()
+
+# Default client info carrying this package's version (used when callers
+# do not pass an explicit ``client_info``).
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+# Explicit public API of this module.
+__all__ = (
+    "DocumentSchemaServiceAsyncClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py
new file mode 100644
index 000000000000..bf485c2e9ad4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py
@@ -0,0 +1,1212 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+import warnings
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials             # type: ignore
+from google.auth.transport import mtls                            # type: ignore
+from google.auth.transport.grpc import SslCredentials             # type: ignore
+from google.auth.exceptions import MutualTLSChannelError          # type: ignore
+from google.oauth2 import service_account                         # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.document_schema_service import pagers
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DocumentSchemaServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import DocumentSchemaServiceGrpcTransport
+from .transports.grpc_asyncio import DocumentSchemaServiceGrpcAsyncIOTransport
+from .transports.rest import DocumentSchemaServiceRestTransport
+
+
+class DocumentSchemaServiceClientMeta(type):
+    """Metaclass for the DocumentSchemaService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DocumentSchemaServiceTransport]]
+    _transport_registry["grpc"] = DocumentSchemaServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = DocumentSchemaServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = DocumentSchemaServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[DocumentSchemaServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DocumentSchemaServiceClient(metaclass=DocumentSchemaServiceClientMeta):
+    """This service lets you manage document schema."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "contentwarehouse.googleapis.com"
+    # Derived once at class-creation time from DEFAULT_ENDPOINT via the
+    # unbound staticmethod (hence the ``__func__`` access).
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    # Endpoint template parameterized by universe domain; the default
+    # universe is the public googleapis.com one.
+    _DEFAULT_ENDPOINT_TEMPLATE = "contentwarehouse.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentSchemaServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentSchemaServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DocumentSchemaServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DocumentSchemaServiceTransport: The transport used by the client
+                instance.
+        """
+        # Set exactly once in __init__ (either user-supplied or constructed).
+        return self._transport
+
+    @staticmethod
+    def document_schema_path(project: str,location: str,document_schema: str,) -> str:
+        """Returns a fully-qualified document_schema string."""
+        return "projects/{project}/locations/{location}/documentSchemas/{document_schema}".format(project=project, location=location, document_schema=document_schema, )
+
+    @staticmethod
+    def parse_document_schema_path(path: str) -> Dict[str,str]:
+        """Parses a document_schema path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/documentSchemas/(?P<document_schema>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def location_path(project: str,location: str,) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_location_path(path: str) -> Dict[str,str]:
+        """Parses a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        # Case-insensitive reads: e.g. "True" / "AUTO" are accepted.
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        # No default: None means "no universe override from the environment".
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+        # (use_client_cert as bool, mtls mode string, universe domain or None)
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        if use_cert_flag:
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+            universe_domain (str): The universe domain used by the client.
+            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+                Possible values are "always", "auto", or "never".
+
+        Returns:
+            str: The API endpoint to be used by the client.
+        """
+        if api_override is not None:
+            api_endpoint = api_override
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            _default_universe = DocumentSchemaServiceClient._DEFAULT_UNIVERSE
+            if universe_domain != _default_universe:
+                raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
+            api_endpoint = DocumentSchemaServiceClient.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = DocumentSchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
+        return api_endpoint
+
+    @staticmethod
+    def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
+        """Return the universe domain used by the client.
+
+        Args:
+            client_universe_domain (Optional[str]): The universe domain configured via the client options.
+            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+        Returns:
+            str: The universe domain to be used by the client.
+
+        Raises:
+            ValueError: If the universe domain is an empty string.
+        """
+        universe_domain = DocumentSchemaServiceClient._DEFAULT_UNIVERSE
+        if client_universe_domain is not None:
+            universe_domain = client_universe_domain
+        elif universe_domain_env is not None:
+            universe_domain = universe_domain_env
+        if len(universe_domain.strip()) == 0:
+            raise ValueError("Universe Domain cannot be an empty string.")
+        return universe_domain
+
+    @staticmethod
+    def _compare_universes(client_universe: str,
+                           credentials: ga_credentials.Credentials) -> bool:
+        """Returns True iff the universe domains used by the client and credentials match.
+
+        Note: this never returns False — on mismatch it raises ValueError.
+
+        Args:
+            client_universe (str): The universe domain configured via the client options.
+            credentials (ga_credentials.Credentials): The credentials being used in the client.
+
+        Returns:
+            bool: True iff client_universe matches the universe in credentials.
+
+        Raises:
+            ValueError: when client_universe does not match the universe in credentials.
+        """
+
+        default_universe = DocumentSchemaServiceClient._DEFAULT_UNIVERSE
+        # Credentials that do not carry a universe_domain attribute are
+        # treated as belonging to the default universe.
+        credentials_universe = getattr(credentials, "universe_domain", default_universe)
+
+        if client_universe != credentials_universe:
+            raise ValueError("The configured universe domain "
+                f"({client_universe}) does not match the universe domain "
+                f"found in the credentials ({credentials_universe}). "
+                "If you haven't configured the universe domain explicitly, "
+                f"`{default_universe}` is the default.")
+        return True
+
+    def _validate_universe_domain(self):
+        """Validates client's and credentials' universe domains are consistent.
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+        # The result is cached in _is_universe_domain_valid: once validation
+        # succeeds, the (potentially raising) comparison is never re-run.
+        self._is_universe_domain_valid = (self._is_universe_domain_valid or
+            DocumentSchemaServiceClient._compare_universes(self.universe_domain, self.transport._credentials))
+        return self._is_universe_domain_valid
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        # Resolved once in __init__ (transport host or derived default).
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        # Resolved once in __init__ from client options / environment.
+        return self._universe_domain
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DocumentSchemaServiceTransport, Callable[..., DocumentSchemaServiceTransport]]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the document schema service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DocumentSchemaServiceTransport,Callable[..., DocumentSchemaServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DocumentSchemaServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        # Normalize client_options: accept a dict, None, or ClientOptions.
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        # universe_domain may be absent on older ClientOptions versions.
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DocumentSchemaServiceClient._read_environment_variables()
+        self._client_cert_source = DocumentSchemaServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = DocumentSchemaServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, DocumentSchemaServiceTransport)
+        if transport_provided:
+            # transport is a DocumentSchemaServiceTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = cast(DocumentSchemaServiceTransport, transport)
+            self._api_endpoint = self._transport.host
+
+        # Endpoint precedence: the provided transport's host (set just above,
+        # if any), then client_options.api_endpoint, then the mTLS/universe
+        # derived default.
+        self._api_endpoint = (self._api_endpoint or
+            DocumentSchemaServiceClient._get_api_endpoint(
+                self._client_options.api_endpoint,
+                self._client_cert_source,
+                self._universe_domain,
+                self._use_mtls_endpoint))
+
+        if not transport_provided:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
+                credentials = google.auth._default.get_api_key_credentials(api_key_value)
+
+            # `transport` may be a registered name (str/None) or a factory
+            # callable; resolve it to something we can call below.
+            transport_init: Union[Type[DocumentSchemaServiceTransport], Callable[..., DocumentSchemaServiceTransport]] = (
+                DocumentSchemaServiceClient.get_transport_class(transport)
+                if isinstance(transport, str) or transport is None
+                else cast(Callable[..., DocumentSchemaServiceTransport], transport)
+            )
+            # initialize with the provided callable or the passed in class
+            self._transport = transport_init(
+                credentials=credentials,
+                credentials_file=self._client_options.credentials_file,
+                host=self._api_endpoint,
+                scopes=self._client_options.scopes,
+                client_cert_source_for_mtls=self._client_cert_source,
+                quota_project_id=self._client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=True,
+                api_audience=self._client_options.api_audience,
+            )
+
+    def create_document_schema(self,
+            request: Optional[Union[document_schema_service.CreateDocumentSchemaRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            document_schema: Optional[gcc_document_schema.DocumentSchema] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> gcc_document_schema.DocumentSchema:
+        r"""Creates a document schema.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_create_document_schema():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceClient()
+
+                # Initialize request argument(s)
+                document_schema = contentwarehouse_v1.DocumentSchema()
+                document_schema.display_name = "display_name_value"
+
+                request = contentwarehouse_v1.CreateDocumentSchemaRequest(
+                    parent="parent_value",
+                    document_schema=document_schema,
+                )
+
+                # Make the request
+                response = client.create_document_schema(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.CreateDocumentSchemaRequest, dict]):
+                The request object. Request message for
+                DocumentSchemaService.CreateDocumentSchema.
+            parent (str):
+                Required. The parent name.
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document_schema (google.cloud.contentwarehouse_v1.types.DocumentSchema):
+                Required. The document schema to
+                create.
+
+                This corresponds to the ``document_schema`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.DocumentSchema:
+                A document schema used to define
+                document structure.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, document_schema])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        # Dict requests are coerced here by the proto-plus constructor.
+        if not isinstance(request, document_schema_service.CreateDocumentSchemaRequest):
+            request = document_schema_service.CreateDocumentSchemaRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if document_schema is not None:
+                request.document_schema = document_schema
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_document_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        # `request.parent` is sent as the routing header so the backend can
+        # route the request to the right location.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_document_schema(self,
+            request: Optional[Union[document_schema_service.UpdateDocumentSchemaRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            document_schema: Optional[gcc_document_schema.DocumentSchema] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> gcc_document_schema.DocumentSchema:
+        r"""Updates a Document Schema. Returns INVALID_ARGUMENT if the name
+        of the Document Schema is non-empty and does not equal the
+        existing name. Supports only appending new properties, adding
+        new ENUM possible values, and updating the
+        [EnumTypeOptions.validation_check_disabled][google.cloud.contentwarehouse.v1.EnumTypeOptions.validation_check_disabled]
+        flag for ENUM possible values. Updating existing properties will
+        result in INVALID_ARGUMENT.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_update_document_schema():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceClient()
+
+                # Initialize request argument(s)
+                document_schema = contentwarehouse_v1.DocumentSchema()
+                document_schema.display_name = "display_name_value"
+
+                request = contentwarehouse_v1.UpdateDocumentSchemaRequest(
+                    name="name_value",
+                    document_schema=document_schema,
+                )
+
+                # Make the request
+                response = client.update_document_schema(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.UpdateDocumentSchemaRequest, dict]):
+                The request object. Request message for
+                DocumentSchemaService.UpdateDocumentSchema.
+            name (str):
+                Required. The name of the document schema to update.
+                Format:
+                projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document_schema (google.cloud.contentwarehouse_v1.types.DocumentSchema):
+                Required. The document schema to
+                update with.
+
+                This corresponds to the ``document_schema`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.DocumentSchema:
+                A document schema used to define
+                document structure.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, document_schema])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.UpdateDocumentSchemaRequest):
+            request = document_schema_service.UpdateDocumentSchemaRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+            if document_schema is not None:
+                request.document_schema = document_schema
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_document_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_document_schema(self,
+            request: Optional[Union[document_schema_service.GetDocumentSchemaRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_schema.DocumentSchema:
+        r"""Gets a document schema. Returns NOT_FOUND if the document schema
+        does not exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_get_document_schema():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.GetDocumentSchemaRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_document_schema(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.GetDocumentSchemaRequest, dict]):
+                The request object. Request message for
+                DocumentSchemaService.GetDocumentSchema.
+            name (str):
+                Required. The name of the document
+                schema to retrieve.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.DocumentSchema:
+                A document schema used to define
+                document structure.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.GetDocumentSchemaRequest):
+            request = document_schema_service.GetDocumentSchemaRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_document_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_document_schema(self,
+            request: Optional[Union[document_schema_service.DeleteDocumentSchemaRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a document schema. Returns NOT_FOUND if the document
+        schema does not exist. Returns BAD_REQUEST if the document
+        schema has documents depending on it.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_delete_document_schema():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteDocumentSchemaRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_document_schema(request=request)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.DeleteDocumentSchemaRequest, dict]):
+                The request object. Request message for
+                DocumentSchemaService.DeleteDocumentSchema.
+            name (str):
+                Required. The name of the document
+                schema to delete.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.DeleteDocumentSchemaRequest):
+            request = document_schema_service.DeleteDocumentSchemaRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_document_schema]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def list_document_schemas(self,
+            request: Optional[Union[document_schema_service.ListDocumentSchemasRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListDocumentSchemasPager:
+        r"""Lists document schemas.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_list_document_schemas():
+                # Create a client
+                client = contentwarehouse_v1.DocumentSchemaServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListDocumentSchemasRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_document_schemas(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.ListDocumentSchemasRequest, dict]):
+                The request object. Request message for
+                DocumentSchemaService.ListDocumentSchemas.
+            parent (str):
+                Required. The parent, which owns this collection of
+                document schemas. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.document_schema_service.pagers.ListDocumentSchemasPager:
+                Response message for
+                DocumentSchemaService.ListDocumentSchemas.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_schema_service.ListDocumentSchemasRequest):
+            request = document_schema_service.ListDocumentSchemasRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_document_schemas]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListDocumentSchemasPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "DocumentSchemaServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+        """
+        self.transport.close()
+
+    def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+
+
+
+
+
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "DocumentSchemaServiceClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/pagers.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/pagers.py
new file mode 100644
index 000000000000..23b14e5750ba
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/pagers.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+
+
+class ListDocumentSchemasPager:
+    """A pager for iterating through ``list_document_schemas`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.contentwarehouse_v1.types.ListDocumentSchemasResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``document_schemas`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListDocumentSchemas`` requests and continue to iterate
+    through the ``document_schemas`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.contentwarehouse_v1.types.ListDocumentSchemasResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., document_schema_service.ListDocumentSchemasResponse],
+            request: document_schema_service.ListDocumentSchemasRequest,
+            response: document_schema_service.ListDocumentSchemasResponse,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.contentwarehouse_v1.types.ListDocumentSchemasRequest):
+                The initial request object.
+            response (google.cloud.contentwarehouse_v1.types.ListDocumentSchemasResponse):
+                The initial response object.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = document_schema_service.ListDocumentSchemasRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[document_schema_service.ListDocumentSchemasResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[document_schema.DocumentSchema]:
+        for page in self.pages:
+            yield from page.document_schemas
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListDocumentSchemasAsyncPager:
+    """A pager for iterating through ``list_document_schemas`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.contentwarehouse_v1.types.ListDocumentSchemasResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``document_schemas`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListDocumentSchemas`` requests and continue to iterate
+    through the ``document_schemas`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.contentwarehouse_v1.types.ListDocumentSchemasResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[document_schema_service.ListDocumentSchemasResponse]],
+            request: document_schema_service.ListDocumentSchemasRequest,
+            response: document_schema_service.ListDocumentSchemasResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.contentwarehouse_v1.types.ListDocumentSchemasRequest):
+                The initial request object.
+            response (google.cloud.contentwarehouse_v1.types.ListDocumentSchemasResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = document_schema_service.ListDocumentSchemasRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[document_schema_service.ListDocumentSchemasResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+    def __aiter__(self) -> AsyncIterator[document_schema.DocumentSchema]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.document_schemas:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/README.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/README.rst
new file mode 100644
index 000000000000..fd0bc7e8c28b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`DocumentSchemaServiceTransport` is the ABC for all transports.
+- public child `DocumentSchemaServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `DocumentSchemaServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseDocumentSchemaServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `DocumentSchemaServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/__init__.py
new file mode 100644
index 000000000000..2659a346e9e4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/__init__.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import DocumentSchemaServiceTransport
+from .grpc import DocumentSchemaServiceGrpcTransport
+from .grpc_asyncio import DocumentSchemaServiceGrpcAsyncIOTransport
+from .rest import DocumentSchemaServiceRestTransport
+from .rest import DocumentSchemaServiceRestInterceptor
+
+
+# Compile a registry of transports.
+# NOTE(review): the keys appear to be the string names by which a transport
+# is selected (e.g. ``transport='grpc'``) — confirm against the service
+# client, which is outside this file.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[DocumentSchemaServiceTransport]]
+_transport_registry['grpc'] = DocumentSchemaServiceGrpcTransport
+_transport_registry['grpc_asyncio'] = DocumentSchemaServiceGrpcAsyncIOTransport
+_transport_registry['rest'] = DocumentSchemaServiceRestTransport
+
+__all__ = (
+    'DocumentSchemaServiceTransport',
+    'DocumentSchemaServiceGrpcTransport',
+    'DocumentSchemaServiceGrpcAsyncIOTransport',
+    'DocumentSchemaServiceRestTransport',
+    'DocumentSchemaServiceRestInterceptor',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py
new file mode 100644
index 000000000000..f4eaad5d4d3e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/base.py
@@ -0,0 +1,246 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class DocumentSchemaServiceTransport(abc.ABC):
+    """Abstract transport class for DocumentSchemaService."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'contentwarehouse.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            api_audience (Optional[str]): The audience applied to default
+                credentials that support ``with_gdch_audience``; when unset,
+                ``host`` is used as the audience instead.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        # Subclasses may set _ignore_credentials before delegating here (e.g.
+        # a gRPC transport given an explicit channel) to skip default auth.
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply audience if the credentials file passed from user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        # The resolved "host:port" endpoint this transport connects to.
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods. Retry/timeout defaults here come
+        # from the service config; Get/List retry only on ServiceUnavailable.
+        self._wrapped_methods = {
+            self.create_document_schema: gapic_v1.method.wrap_method(
+                self.create_document_schema,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_document_schema: gapic_v1.method.wrap_method(
+                self.update_document_schema,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_document_schema: gapic_v1.method.wrap_method(
+                self.get_document_schema,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_document_schema: gapic_v1.method.wrap_method(
+                self.delete_document_schema,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_document_schemas: gapic_v1.method.wrap_method(
+                self.list_document_schemas,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_operation: gapic_v1.method.wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+         }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    @property
+    def create_document_schema(self) -> Callable[
+            [document_schema_service.CreateDocumentSchemaRequest],
+            Union[
+                gcc_document_schema.DocumentSchema,
+                Awaitable[gcc_document_schema.DocumentSchema]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def update_document_schema(self) -> Callable[
+            [document_schema_service.UpdateDocumentSchemaRequest],
+            Union[
+                gcc_document_schema.DocumentSchema,
+                Awaitable[gcc_document_schema.DocumentSchema]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_document_schema(self) -> Callable[
+            [document_schema_service.GetDocumentSchemaRequest],
+            Union[
+                document_schema.DocumentSchema,
+                Awaitable[document_schema.DocumentSchema]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def delete_document_schema(self) -> Callable[
+            [document_schema_service.DeleteDocumentSchemaRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_document_schemas(self) -> Callable[
+            [document_schema_service.ListDocumentSchemasRequest],
+            Union[
+                document_schema_service.ListDocumentSchemasResponse,
+                Awaitable[document_schema_service.ListDocumentSchemasResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[
+        [operations_pb2.GetOperationRequest],
+        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'DocumentSchemaServiceTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py
new file mode 100644
index 000000000000..1fd781c7c4b7
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc.py
@@ -0,0 +1,405 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth                         # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DocumentSchemaServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class DocumentSchemaServiceGrpcTransport(DocumentSchemaServiceTransport):
+    """gRPC backend transport for DocumentSchemaService.
+
+    This service lets you manage document schema.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            api_audience (Optional[str]): The intended audience for the
+                credentials; forwarded unchanged to the base
+                ``DocumentSchemaServiceTransport`` initializer.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, grpc.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_document_schema(self) -> Callable[
+            [document_schema_service.CreateDocumentSchemaRequest],
+            gcc_document_schema.DocumentSchema]:
+        r"""Return a callable for the create document schema method over gRPC.
+
+        Creates a document schema.
+
+        Returns:
+            Callable[[~.CreateDocumentSchemaRequest],
+                    ~.DocumentSchema]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_document_schema' not in self._stubs:
+            self._stubs['create_document_schema'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/CreateDocumentSchema',
+                request_serializer=document_schema_service.CreateDocumentSchemaRequest.serialize,
+                response_deserializer=gcc_document_schema.DocumentSchema.deserialize,
+            )
+        return self._stubs['create_document_schema']
+
+    @property
+    def update_document_schema(self) -> Callable[
+            [document_schema_service.UpdateDocumentSchemaRequest],
+            gcc_document_schema.DocumentSchema]:
+        r"""Return a callable for the update document schema method over gRPC.
+
+        Updates a Document Schema. Returns INVALID_ARGUMENT if the name
+        of the Document Schema is non-empty and does not equal the
+        existing name. Supports only appending new properties, adding
+        new ENUM possible values, and updating the
+        [EnumTypeOptions.validation_check_disabled][google.cloud.contentwarehouse.v1.EnumTypeOptions.validation_check_disabled]
+        flag for ENUM possible values. Updating existing properties will
+        result into INVALID_ARGUMENT.
+
+        Returns:
+            Callable[[~.UpdateDocumentSchemaRequest],
+                    ~.DocumentSchema]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_document_schema' not in self._stubs:
+            self._stubs['update_document_schema'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/UpdateDocumentSchema',
+                request_serializer=document_schema_service.UpdateDocumentSchemaRequest.serialize,
+                response_deserializer=gcc_document_schema.DocumentSchema.deserialize,
+            )
+        return self._stubs['update_document_schema']
+
+    @property
+    def get_document_schema(self) -> Callable[
+            [document_schema_service.GetDocumentSchemaRequest],
+            document_schema.DocumentSchema]:
+        r"""Return a callable for the get document schema method over gRPC.
+
+        Gets a document schema. Returns NOT_FOUND if the document schema
+        does not exist.
+
+        Returns:
+            Callable[[~.GetDocumentSchemaRequest],
+                    ~.DocumentSchema]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_document_schema' not in self._stubs:
+            self._stubs['get_document_schema'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/GetDocumentSchema',
+                request_serializer=document_schema_service.GetDocumentSchemaRequest.serialize,
+                response_deserializer=document_schema.DocumentSchema.deserialize,
+            )
+        return self._stubs['get_document_schema']
+
+    @property
+    def delete_document_schema(self) -> Callable[
+            [document_schema_service.DeleteDocumentSchemaRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete document schema method over gRPC.
+
+        Deletes a document schema. Returns NOT_FOUND if the document
+        schema does not exist. Returns BAD_REQUEST if the document
+        schema has documents depending on it.
+
+        Returns:
+            Callable[[~.DeleteDocumentSchemaRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_document_schema' not in self._stubs:
+            self._stubs['delete_document_schema'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/DeleteDocumentSchema',
+                request_serializer=document_schema_service.DeleteDocumentSchemaRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_document_schema']
+
+    @property
+    def list_document_schemas(self) -> Callable[
+            [document_schema_service.ListDocumentSchemasRequest],
+            document_schema_service.ListDocumentSchemasResponse]:
+        r"""Return a callable for the list document schemas method over gRPC.
+
+        Lists document schemas.
+
+        Returns:
+            Callable[[~.ListDocumentSchemasRequest],
+                    ~.ListDocumentSchemasResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_document_schemas' not in self._stubs:
+            self._stubs['list_document_schemas'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/ListDocumentSchemas',
+                request_serializer=document_schema_service.ListDocumentSchemasRequest.serialize,
+                response_deserializer=document_schema_service.ListDocumentSchemasResponse.deserialize,
+            )
+        return self._stubs['list_document_schemas']
+
+    def close(self):
+        # Terminate the underlying gRPC channel; the transport (and its
+        # cached stubs) must not be used afterwards.
+        self.grpc_channel.close()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def kind(self) -> str:
+        # String identifier for this transport implementation.
+        return "grpc"
+
+
+__all__ = (
+    'DocumentSchemaServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..e594c65ec129
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/grpc_asyncio.py
@@ -0,0 +1,469 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc                        # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DocumentSchemaServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import DocumentSchemaServiceGrpcTransport
+
+
+class DocumentSchemaServiceGrpcAsyncIOTransport(DocumentSchemaServiceTransport):
+    """gRPC AsyncIO backend transport for DocumentSchemaService.
+
+    This service lets you manage document schema.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    # Cache of per-RPC stub callables, keyed by method name.
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+              creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        # Record whether the installed ``wrap_method`` accepts a ``kind``
+        # parameter; used by ``_wrap_method`` below.
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def create_document_schema(self) -> Callable[
+            [document_schema_service.CreateDocumentSchemaRequest],
+            Awaitable[gcc_document_schema.DocumentSchema]]:
+        r"""Return a callable for the create document schema method over gRPC.
+
+        Creates a document schema.
+
+        Returns:
+            Callable[[~.CreateDocumentSchemaRequest],
+                    Awaitable[~.DocumentSchema]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_document_schema' not in self._stubs:
+            self._stubs['create_document_schema'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/CreateDocumentSchema',
+                request_serializer=document_schema_service.CreateDocumentSchemaRequest.serialize,
+                response_deserializer=gcc_document_schema.DocumentSchema.deserialize,
+            )
+        return self._stubs['create_document_schema']
+
+    @property
+    def update_document_schema(self) -> Callable[
+            [document_schema_service.UpdateDocumentSchemaRequest],
+            Awaitable[gcc_document_schema.DocumentSchema]]:
+        r"""Return a callable for the update document schema method over gRPC.
+
+        Updates a Document Schema. Returns INVALID_ARGUMENT if the name
+        of the Document Schema is non-empty and does not equal the
+        existing name. Supports only appending new properties, adding
+        new ENUM possible values, and updating the
+        [EnumTypeOptions.validation_check_disabled][google.cloud.contentwarehouse.v1.EnumTypeOptions.validation_check_disabled]
+        flag for ENUM possible values. Updating existing properties will
+        result into INVALID_ARGUMENT.
+
+        Returns:
+            Callable[[~.UpdateDocumentSchemaRequest],
+                    Awaitable[~.DocumentSchema]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_document_schema' not in self._stubs:
+            self._stubs['update_document_schema'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/UpdateDocumentSchema',
+                request_serializer=document_schema_service.UpdateDocumentSchemaRequest.serialize,
+                response_deserializer=gcc_document_schema.DocumentSchema.deserialize,
+            )
+        return self._stubs['update_document_schema']
+
+    @property
+    def get_document_schema(self) -> Callable[
+            [document_schema_service.GetDocumentSchemaRequest],
+            Awaitable[document_schema.DocumentSchema]]:
+        r"""Return a callable for the get document schema method over gRPC.
+
+        Gets a document schema. Returns NOT_FOUND if the document schema
+        does not exist.
+
+        Returns:
+            Callable[[~.GetDocumentSchemaRequest],
+                    Awaitable[~.DocumentSchema]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_document_schema' not in self._stubs:
+            self._stubs['get_document_schema'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/GetDocumentSchema',
+                request_serializer=document_schema_service.GetDocumentSchemaRequest.serialize,
+                response_deserializer=document_schema.DocumentSchema.deserialize,
+            )
+        return self._stubs['get_document_schema']
+
+    @property
+    def delete_document_schema(self) -> Callable[
+            [document_schema_service.DeleteDocumentSchemaRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete document schema method over gRPC.
+
+        Deletes a document schema. Returns NOT_FOUND if the document
+        schema does not exist. Returns BAD_REQUEST if the document
+        schema has documents depending on it.
+
+        Returns:
+            Callable[[~.DeleteDocumentSchemaRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_document_schema' not in self._stubs:
+            self._stubs['delete_document_schema'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/DeleteDocumentSchema',
+                request_serializer=document_schema_service.DeleteDocumentSchemaRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_document_schema']
+
+    @property
+    def list_document_schemas(self) -> Callable[
+            [document_schema_service.ListDocumentSchemasRequest],
+            Awaitable[document_schema_service.ListDocumentSchemasResponse]]:
+        r"""Return a callable for the list document schemas method over gRPC.
+
+        Lists document schemas.
+
+        Returns:
+            Callable[[~.ListDocumentSchemasRequest],
+                    Awaitable[~.ListDocumentSchemasResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_document_schemas' not in self._stubs:
+            self._stubs['list_document_schemas'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentSchemaService/ListDocumentSchemas',
+                request_serializer=document_schema_service.ListDocumentSchemasRequest.serialize,
+                response_deserializer=document_schema_service.ListDocumentSchemasResponse.deserialize,
+            )
+        return self._stubs['list_document_schemas']
+
+    def _prep_wrapped_messages(self, client_info):
+        """ Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+        self._wrapped_methods = {
+            self.create_document_schema: self._wrap_method(
+                self.create_document_schema,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_document_schema: self._wrap_method(
+                self.update_document_schema,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_document_schema: self._wrap_method(
+                self.get_document_schema,
+                default_retry=retries.AsyncRetry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_document_schema: self._wrap_method(
+                self.delete_document_schema,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_document_schemas: self._wrap_method(
+                self.list_document_schemas,
+                default_retry=retries.AsyncRetry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_operation: self._wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def _wrap_method(self, func, *args, **kwargs):
+        # Only pass ``kind`` when the installed ``wrap_method`` accepts the
+        # parameter (detected in ``__init__`` via ``inspect.signature``).
+        if self._wrap_with_kind:  # pragma: NO COVER
+            kwargs["kind"] = self.kind
+        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
+    def close(self):
+        """Close the underlying gRPC channel, releasing its resources."""
+        return self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        """Identifier of this transport implementation ("grpc_asyncio")."""
+        return "grpc_asyncio"
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+
+        Calls the standard ``google.longrunning.Operations/GetOperation``
+        RPC to fetch the current state of a long-running operation.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+
+# Explicit public API of this module.
+__all__ = (
+    'DocumentSchemaServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py
new file mode 100644
index 000000000000..e2064643c0bb
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py
@@ -0,0 +1,780 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+import json  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+
+from requests import __version__ as requests_version
+import dataclasses
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+from .rest_base import _BaseDocumentSchemaServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+# ``gapic_v1.method._MethodDefault`` may be absent from the installed
+# google-api-core; fall back to ``object`` so the alias still resolves.
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+
+# Default client info for the REST transport: reuses the base gapic version,
+# reports the installed ``requests`` version, and no gRPC version.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=f"requests@{requests_version}",
+)
+
+
+class DocumentSchemaServiceRestInterceptor:
+    """Interceptor for DocumentSchemaService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the DocumentSchemaServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomDocumentSchemaServiceInterceptor(DocumentSchemaServiceRestInterceptor):
+            def pre_create_document_schema(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_document_schema(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_delete_document_schema(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_document_schema(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_document_schema(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_list_document_schemas(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_document_schemas(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_update_document_schema(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_document_schema(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = DocumentSchemaServiceRestTransport(interceptor=MyCustomDocumentSchemaServiceInterceptor())
+        client = DocumentSchemaServiceClient(transport=transport)
+
+
+    """
+    def pre_create_document_schema(self, request: document_schema_service.CreateDocumentSchemaRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_schema_service.CreateDocumentSchemaRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_document_schema
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentSchemaService server.
+        """
+        return request, metadata
+
+    def post_create_document_schema(self, response: gcc_document_schema.DocumentSchema) -> gcc_document_schema.DocumentSchema:
+        """Post-rpc interceptor for create_document_schema
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentSchemaService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_document_schema(self, request: document_schema_service.DeleteDocumentSchemaRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_schema_service.DeleteDocumentSchemaRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_document_schema
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentSchemaService server.
+        """
+        return request, metadata
+
+    def pre_get_document_schema(self, request: document_schema_service.GetDocumentSchemaRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_schema_service.GetDocumentSchemaRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_document_schema
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentSchemaService server.
+        """
+        return request, metadata
+
+    def post_get_document_schema(self, response: document_schema.DocumentSchema) -> document_schema.DocumentSchema:
+        """Post-rpc interceptor for get_document_schema
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentSchemaService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_list_document_schemas(self, request: document_schema_service.ListDocumentSchemasRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_schema_service.ListDocumentSchemasRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_document_schemas
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentSchemaService server.
+        """
+        return request, metadata
+
+    def post_list_document_schemas(self, response: document_schema_service.ListDocumentSchemasResponse) -> document_schema_service.ListDocumentSchemasResponse:
+        """Post-rpc interceptor for list_document_schemas
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentSchemaService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_update_document_schema(self, request: document_schema_service.UpdateDocumentSchemaRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_schema_service.UpdateDocumentSchemaRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_document_schema
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentSchemaService server.
+        """
+        return request, metadata
+
+    def post_update_document_schema(self, response: gcc_document_schema.DocumentSchema) -> gcc_document_schema.DocumentSchema:
+        """Post-rpc interceptor for update_document_schema
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentSchemaService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_get_operation(
+        self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentSchemaService server.
+        """
+        # Default implementation is a pass-through: request and metadata unchanged.
+        return request, metadata
+
+    def post_get_operation(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentSchemaService server but before
+        it is returned to user code.
+        """
+        # Default implementation is a pass-through: response unchanged.
+        return response
+
+
+@dataclasses.dataclass
+class DocumentSchemaServiceRestStub:
+    """Mixin carrying the shared transport state each per-RPC stub class needs."""
+    _session: AuthorizedSession  # authorized HTTP session used to send requests
+    _host: str  # API host; interpolated directly into request URLs ("{host}{uri}")
+    _interceptor: DocumentSchemaServiceRestInterceptor  # pre/post hooks applied around each RPC
+
+
+class DocumentSchemaServiceRestTransport(_BaseDocumentSchemaServiceRestTransport):
+    """REST backend synchronous transport for DocumentSchemaService.
+
+    This service lets you manage document schema.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[DocumentSchemaServiceRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+            interceptor (Optional[DocumentSchemaServiceRestInterceptor]): Hook
+                object whose pre/post methods are invoked around every RPC.
+                When ``None``, a default no-op interceptor is installed.
+            api_audience (Optional[str]): The intended audience of the
+                credentials, forwarded to the base transport.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        # NOTE(review): credentials_file, scopes and quota_project_id are accepted
+        # but not forwarded to the base constructor (see TODO above).
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        # Fall back to a pass-through interceptor so callers never need None checks.
+        self._interceptor = interceptor or DocumentSchemaServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateDocumentSchema(_BaseDocumentSchemaServiceRestTransport._BaseCreateDocumentSchema, DocumentSchemaServiceRestStub):
+        """Callable stub that sends the CreateDocumentSchema RPC over REST."""
+
+        def __hash__(self):
+            return hash("DocumentSchemaServiceRestTransport.CreateDocumentSchema")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch using the HTTP verb chosen by transcoding (e.g. session.post).
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_schema_service.CreateDocumentSchemaRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> gcc_document_schema.DocumentSchema:
+            r"""Call the create document schema method over HTTP.
+
+            Args:
+                request (~.document_schema_service.CreateDocumentSchemaRequest):
+                    The request object. Request message for
+                DocumentSchemaService.CreateDocumentSchema.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcc_document_schema.DocumentSchema:
+                    A document schema used to define
+                document structure.
+
+            """
+
+            http_options = _BaseDocumentSchemaServiceRestTransport._BaseCreateDocumentSchema._get_http_options()
+            # Give the interceptor a chance to rewrite the request/metadata first.
+            request, metadata = self._interceptor.pre_create_document_schema(request, metadata)
+            transcoded_request = _BaseDocumentSchemaServiceRestTransport._BaseCreateDocumentSchema._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentSchemaServiceRestTransport._BaseCreateDocumentSchema._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentSchemaServiceRestTransport._BaseCreateDocumentSchema._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentSchemaServiceRestTransport._CreateDocumentSchema._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = gcc_document_schema.DocumentSchema()
+            pb_resp = gcc_document_schema.DocumentSchema.pb(resp)
+
+            # Parse into the underlying pb message; `resp` wraps it and reflects the fields.
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_create_document_schema(resp)
+            return resp
+
+    class _DeleteDocumentSchema(_BaseDocumentSchemaServiceRestTransport._BaseDeleteDocumentSchema, DocumentSchemaServiceRestStub):
+        """Callable stub that sends the DeleteDocumentSchema RPC over REST."""
+
+        def __hash__(self):
+            return hash("DocumentSchemaServiceRestTransport.DeleteDocumentSchema")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch using the HTTP verb chosen by transcoding; no request body is sent.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: document_schema_service.DeleteDocumentSchemaRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ):
+            r"""Call the delete document schema method over HTTP.
+
+            Args:
+                request (~.document_schema_service.DeleteDocumentSchemaRequest):
+                    The request object. Request message for
+                DocumentSchemaService.DeleteDocumentSchema.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+            """
+
+            http_options = _BaseDocumentSchemaServiceRestTransport._BaseDeleteDocumentSchema._get_http_options()
+            # Give the interceptor a chance to rewrite the request/metadata first.
+            request, metadata = self._interceptor.pre_delete_document_schema(request, metadata)
+            transcoded_request = _BaseDocumentSchemaServiceRestTransport._BaseDeleteDocumentSchema._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentSchemaServiceRestTransport._BaseDeleteDocumentSchema._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentSchemaServiceRestTransport._DeleteDocumentSchema._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+            # The RPC returns google.protobuf.Empty, so the successful response
+            # body is discarded and this method implicitly returns None.
+
+    class _GetDocumentSchema(_BaseDocumentSchemaServiceRestTransport._BaseGetDocumentSchema, DocumentSchemaServiceRestStub):
+        """Callable stub that sends the GetDocumentSchema RPC over REST."""
+
+        def __hash__(self):
+            return hash("DocumentSchemaServiceRestTransport.GetDocumentSchema")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch using the HTTP verb chosen by transcoding; no request body is sent.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: document_schema_service.GetDocumentSchemaRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_schema.DocumentSchema:
+            r"""Call the get document schema method over HTTP.
+
+            Args:
+                request (~.document_schema_service.GetDocumentSchemaRequest):
+                    The request object. Request message for
+                DocumentSchemaService.GetDocumentSchema.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_schema.DocumentSchema:
+                    A document schema used to define
+                document structure.
+
+            """
+
+            http_options = _BaseDocumentSchemaServiceRestTransport._BaseGetDocumentSchema._get_http_options()
+            # Give the interceptor a chance to rewrite the request/metadata first.
+            request, metadata = self._interceptor.pre_get_document_schema(request, metadata)
+            transcoded_request = _BaseDocumentSchemaServiceRestTransport._BaseGetDocumentSchema._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentSchemaServiceRestTransport._BaseGetDocumentSchema._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentSchemaServiceRestTransport._GetDocumentSchema._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_schema.DocumentSchema()
+            pb_resp = document_schema.DocumentSchema.pb(resp)
+
+            # Parse into the underlying pb message; `resp` wraps it and reflects the fields.
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_get_document_schema(resp)
+            return resp
+
+    class _ListDocumentSchemas(_BaseDocumentSchemaServiceRestTransport._BaseListDocumentSchemas, DocumentSchemaServiceRestStub):
+        """Callable stub that sends the ListDocumentSchemas RPC over REST."""
+
+        def __hash__(self):
+            return hash("DocumentSchemaServiceRestTransport.ListDocumentSchemas")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch using the HTTP verb chosen by transcoding; no request body is sent.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: document_schema_service.ListDocumentSchemasRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_schema_service.ListDocumentSchemasResponse:
+            r"""Call the list document schemas method over HTTP.
+
+            Args:
+                request (~.document_schema_service.ListDocumentSchemasRequest):
+                    The request object. Request message for
+                DocumentSchemaService.ListDocumentSchemas.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_schema_service.ListDocumentSchemasResponse:
+                    Response message for
+                DocumentSchemaService.ListDocumentSchemas.
+
+            """
+
+            http_options = _BaseDocumentSchemaServiceRestTransport._BaseListDocumentSchemas._get_http_options()
+            # Give the interceptor a chance to rewrite the request/metadata first.
+            request, metadata = self._interceptor.pre_list_document_schemas(request, metadata)
+            transcoded_request = _BaseDocumentSchemaServiceRestTransport._BaseListDocumentSchemas._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentSchemaServiceRestTransport._BaseListDocumentSchemas._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentSchemaServiceRestTransport._ListDocumentSchemas._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_schema_service.ListDocumentSchemasResponse()
+            pb_resp = document_schema_service.ListDocumentSchemasResponse.pb(resp)
+
+            # Parse into the underlying pb message; `resp` wraps it and reflects the fields.
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_list_document_schemas(resp)
+            return resp
+
+    class _UpdateDocumentSchema(_BaseDocumentSchemaServiceRestTransport._BaseUpdateDocumentSchema, DocumentSchemaServiceRestStub):
+        """Callable stub that sends the UpdateDocumentSchema RPC over REST."""
+
+        def __hash__(self):
+            return hash("DocumentSchemaServiceRestTransport.UpdateDocumentSchema")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch using the HTTP verb chosen by transcoding (e.g. session.patch).
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_schema_service.UpdateDocumentSchemaRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> gcc_document_schema.DocumentSchema:
+            r"""Call the update document schema method over HTTP.
+
+            Args:
+                request (~.document_schema_service.UpdateDocumentSchemaRequest):
+                    The request object. Request message for
+                DocumentSchemaService.UpdateDocumentSchema.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcc_document_schema.DocumentSchema:
+                    A document schema used to define
+                document structure.
+
+            """
+
+            http_options = _BaseDocumentSchemaServiceRestTransport._BaseUpdateDocumentSchema._get_http_options()
+            # Give the interceptor a chance to rewrite the request/metadata first.
+            request, metadata = self._interceptor.pre_update_document_schema(request, metadata)
+            transcoded_request = _BaseDocumentSchemaServiceRestTransport._BaseUpdateDocumentSchema._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentSchemaServiceRestTransport._BaseUpdateDocumentSchema._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentSchemaServiceRestTransport._BaseUpdateDocumentSchema._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentSchemaServiceRestTransport._UpdateDocumentSchema._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = gcc_document_schema.DocumentSchema()
+            pb_resp = gcc_document_schema.DocumentSchema.pb(resp)
+
+            # Parse into the underlying pb message; `resp` wraps it and reflects the fields.
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_update_document_schema(resp)
+            return resp
+
+    @property
+    def create_document_schema(self) -> Callable[
+            [document_schema_service.CreateDocumentSchemaRequest],
+            gcc_document_schema.DocumentSchema]:
+        """Callable for the CreateDocumentSchema RPC, with interceptor hooks applied."""
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._CreateDocumentSchema(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def delete_document_schema(self) -> Callable[
+            [document_schema_service.DeleteDocumentSchemaRequest],
+            empty_pb2.Empty]:
+        """Callable for the DeleteDocumentSchema RPC, with interceptor hooks applied."""
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._DeleteDocumentSchema(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_document_schema(self) -> Callable[
+            [document_schema_service.GetDocumentSchemaRequest],
+            document_schema.DocumentSchema]:
+        """Callable for the GetDocumentSchema RPC, with interceptor hooks applied."""
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._GetDocumentSchema(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def list_document_schemas(self) -> Callable[
+            [document_schema_service.ListDocumentSchemasRequest],
+            document_schema_service.ListDocumentSchemasResponse]:
+        """Callable for the ListDocumentSchemas RPC, with interceptor hooks applied."""
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._ListDocumentSchemas(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def update_document_schema(self) -> Callable[
+            [document_schema_service.UpdateDocumentSchemaRequest],
+            gcc_document_schema.DocumentSchema]:
+        """Callable for the UpdateDocumentSchema RPC, with interceptor hooks applied."""
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._UpdateDocumentSchema(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_operation(self):
+        """Callable for the GetOperation RPC, with interceptor hooks applied."""
+        return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
+
+    class _GetOperation(_BaseDocumentSchemaServiceRestTransport._BaseGetOperation, DocumentSchemaServiceRestStub):
+        """Callable stub that sends the long-running-operations GetOperation RPC over REST."""
+
+        def __hash__(self):
+            return hash("DocumentSchemaServiceRestTransport.GetOperation")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch using the HTTP verb chosen by transcoding; no request body is sent.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+            request: operations_pb2.GetOperationRequest, *,
+            retry: OptionalRetry=gapic_v1.method.DEFAULT,
+            timeout: Optional[float]=None,
+            metadata: Sequence[Tuple[str, str]]=(),
+            ) -> operations_pb2.Operation:
+
+            r"""Call the get operation method over HTTP.
+
+            Args:
+                request (operations_pb2.GetOperationRequest):
+                    The request object for GetOperation method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                operations_pb2.Operation: Response from GetOperation method.
+            """
+
+            http_options = _BaseDocumentSchemaServiceRestTransport._BaseGetOperation._get_http_options()
+            # Give the interceptor a chance to rewrite the request/metadata first.
+            request, metadata = self._interceptor.pre_get_operation(request, metadata)
+            transcoded_request = _BaseDocumentSchemaServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentSchemaServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentSchemaServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Operation is a plain pb2 message (no proto-plus wrapper), so it is
+            # decoded and parsed directly here.
+            content = response.content.decode("utf-8")
+            resp = operations_pb2.Operation()
+            resp = json_format.Parse(content, resp)
+            resp = self._interceptor.post_get_operation(resp)
+            return resp
+
+    @property
+    def kind(self) -> str:
+        """Label identifying this transport flavor."""
+        return "rest"
+
+    def close(self):
+        """Close the underlying authorized HTTP session."""
+        self._session.close()
+
+
+# Public surface of this module: only the concrete REST transport is exported.
+__all__=(
+    'DocumentSchemaServiceRestTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest_base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest_base.py
new file mode 100644
index 000000000000..12a73c80b895
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest_base.py
@@ -0,0 +1,325 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json  # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import DocumentSchemaServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+class _BaseDocumentSchemaServiceRestTransport(DocumentSchemaServiceTransport):
+    """Base REST backend transport for DocumentSchemaService.
+
+    Note: This class is not meant to be used directly. Use its sync and
+    async sub-classes instead.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        # Prepend ``url_scheme`` only when the caller did not already supply one.
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    # Each nested _Base<Rpc> class below bundles the static pieces of one RPC:
+    # its HTTP rule, request transcoding, and JSON body/query-param encoding.
+    # Sync and async transport subclasses compose these helpers into full calls.
+    class _BaseCreateDocumentSchema:
+        def __hash__(self):  # pragma: NO COVER
+            # NOTE(review): generated placeholder — it *returns* (does not raise)
+            # NotImplementedError and is not expected to be invoked. The same
+            # applies to the identical stubs in the sibling classes below.
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            # Supply defaults for required fields missing from the message dict.
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*/locations/*}/documentSchemas',
+                'body': 'document_schema',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_schema_service.CreateDocumentSchemaRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentSchemaServiceRestTransport._BaseCreateDocumentSchema._get_unset_required_fields(query_params))
+
+            # Ask the server for JSON responses with enums encoded as ints.
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseDeleteDocumentSchema:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'delete',
+                'uri': '/v1/{name=projects/*/locations/*/documentSchemas/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_schema_service.DeleteDocumentSchemaRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentSchemaServiceRestTransport._BaseDeleteDocumentSchema._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseGetDocumentSchema:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{name=projects/*/locations/*/documentSchemas/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_schema_service.GetDocumentSchemaRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentSchemaServiceRestTransport._BaseGetDocumentSchema._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseListDocumentSchemas:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{parent=projects/*/locations/*}/documentSchemas',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_schema_service.ListDocumentSchemasRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentSchemaServiceRestTransport._BaseListDocumentSchemas._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseUpdateDocumentSchema:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'patch',
+                'uri': '/v1/{name=projects/*/locations/*/documentSchemas/*}',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_schema_service.UpdateDocumentSchemaRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentSchemaServiceRestTransport._BaseUpdateDocumentSchema._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseGetOperation:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            # Unlike the RPCs above, GetOperation receives a raw
+            # ``operations_pb2`` message (no proto-plus ``.pb()`` wrapper),
+            # so it is converted via MessageToDict and passed as kwargs.
+            request_kwargs = json_format.MessageToDict(request)
+            transcoded_request = path_template.transcode(
+                http_options, **request_kwargs)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            return query_params
+
+# Explicit public surface of this module.
+__all__=(
+    '_BaseDocumentSchemaServiceRestTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/__init__.py
new file mode 100644
index 000000000000..729ba971afe2
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import DocumentServiceClient
+from .async_client import DocumentServiceAsyncClient
+
+# Re-export the service clients as this subpackage's public API.
+__all__ = (
+    'DocumentServiceClient',
+    'DocumentServiceAsyncClient',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/async_client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/async_client.py
new file mode 100644
index 000000000000..721bd418f2bc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/async_client.py
@@ -0,0 +1,1211 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.oauth2 import service_account              # type: ignore
+
+
+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.document_service import pagers
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.documentai_v1.types import document as gcd_document
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DocumentServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import DocumentServiceGrpcAsyncIOTransport
+from .client import DocumentServiceClient
+
+
+class DocumentServiceAsyncClient:
+    """This service lets you manage documents."""
+
+    # The wrapped synchronous client that performs all actual work.
+    _client: DocumentServiceClient
+
+    # Copy defaults from the synchronous client for use here.
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = DocumentServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = DocumentServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = DocumentServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = DocumentServiceClient._DEFAULT_UNIVERSE
+
+    # Resource-path helpers are re-exported from the sync client so both
+    # client classes share a single implementation.
+    document_path = staticmethod(DocumentServiceClient.document_path)
+    parse_document_path = staticmethod(DocumentServiceClient.parse_document_path)
+    document_schema_path = staticmethod(DocumentServiceClient.document_schema_path)
+    parse_document_schema_path = staticmethod(DocumentServiceClient.parse_document_schema_path)
+    location_path = staticmethod(DocumentServiceClient.location_path)
+    parse_location_path = staticmethod(DocumentServiceClient.parse_location_path)
+    common_billing_account_path = staticmethod(DocumentServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(DocumentServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(DocumentServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(DocumentServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(DocumentServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(DocumentServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(DocumentServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(DocumentServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(DocumentServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(DocumentServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentServiceAsyncClient: The constructed client.
+        """
+        # Call the sync classmethod's underlying function with this async
+        # class as ``cls`` so the constructed client is the async variant.
+        return DocumentServiceClient.from_service_account_info.__func__(DocumentServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentServiceAsyncClient: The constructed client.
+        """
+        # Call the sync classmethod's underlying function with this async
+        # class as ``cls`` so the constructed client is the async variant.
+        return DocumentServiceClient.from_service_account_file.__func__(DocumentServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    # Convenience alias: a service-account JSON file is handled by
+    # from_service_account_file.
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        # Delegate to the synchronous client's implementation.
+        return DocumentServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DocumentServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DocumentServiceTransport: The transport used by the client instance.
+        """
+        # Exposes the wrapped sync client's transport instance.
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        # Reads the wrapped sync client's resolved endpoint.
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        # Reads the wrapped sync client's resolved universe domain.
+        return self._client._universe_domain
+
+    get_transport_class = DocumentServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DocumentServiceTransport, Callable[..., DocumentServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the document service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DocumentServiceTransport,Callable[..., DocumentServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DocumentServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        # All work is delegated to a wrapped synchronous client; the
+        # "grpc_asyncio" transport default makes the RPCs awaitable.
+        self._client = DocumentServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+    async def create_document(self,
+            request: Optional[Union[document_service_request.CreateDocumentRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            document: Optional[gcc_document.Document] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_service.CreateDocumentResponse:
+        r"""Creates a document.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_create_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document = contentwarehouse_v1.Document()
+                document.plain_text = "plain_text_value"
+                document.raw_document_path = "raw_document_path_value"
+                document.display_name = "display_name_value"
+
+                request = contentwarehouse_v1.CreateDocumentRequest(
+                    parent="parent_value",
+                    document=document,
+                )
+
+                # Make the request
+                response = await client.create_document(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.CreateDocumentRequest, dict]]):
+                The request object. Request message for
+                DocumentService.CreateDocument.
+            parent (:class:`str`):
+                Required. The parent name. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document (:class:`google.cloud.contentwarehouse_v1.types.Document`):
+                Required. The document to create.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.CreateDocumentResponse:
+                Response message for
+                DocumentService.CreateDocument.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, document])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        # Note: the request type lives in ``document_service_request``; the
+        # response type lives in ``document_service``.
+        if not isinstance(request, document_service_request.CreateDocumentRequest):
+            request = document_service_request.CreateDocumentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if document is not None:
+            request.document = document
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_document(self,
+            request: Optional[Union[document_service_request.GetDocumentRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> gcc_document.Document:
+        r"""Gets a document. Returns NOT_FOUND if the document does not
+        exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_get_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.GetDocumentRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.get_document(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.GetDocumentRequest, dict]]):
+                The request object. Request message for
+                DocumentService.GetDocument.
+            name (:class:`str`):
+                Required. The name of the document to retrieve. Format:
+                projects/{project_number}/locations/{location}/documents/{document_id}
+                or
+                projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.Document:
+                Defines the structure for content
+                warehouse document proto.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        #   A dict (or None) is coerced into a GetDocumentRequest by this constructor.
+        if not isinstance(request, document_service_request.GetDocumentRequest):
+            request = document_service_request.GetDocumentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        # (gapic routing header keyed on the resource ``name`` field.)
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_document(self,
+            request: Optional[Union[document_service_request.UpdateDocumentRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            document: Optional[gcc_document.Document] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_service.UpdateDocumentResponse:
+        r"""Updates a document. Returns INVALID_ARGUMENT if the name of the
+        document is non-empty and does not equal the existing name.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_update_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                document = contentwarehouse_v1.Document()
+                document.plain_text = "plain_text_value"
+                document.raw_document_path = "raw_document_path_value"
+                document.display_name = "display_name_value"
+
+                request = contentwarehouse_v1.UpdateDocumentRequest(
+                    name="name_value",
+                    document=document,
+                )
+
+                # Make the request
+                response = await client.update_document(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.UpdateDocumentRequest, dict]]):
+                The request object. Request message for
+                DocumentService.UpdateDocument.
+            name (:class:`str`):
+                Required. The name of the document to update. Format:
+                projects/{project_number}/locations/{location}/documents/{document_id}
+                or
+                projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document (:class:`google.cloud.contentwarehouse_v1.types.Document`):
+                Required. The document to update.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.UpdateDocumentResponse:
+                Response message for
+                DocumentService.UpdateDocument.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, document])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        #   A dict (or None) is coerced into an UpdateDocumentRequest by this constructor.
+        if not isinstance(request, document_service_request.UpdateDocumentRequest):
+            request = document_service_request.UpdateDocumentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+        if document is not None:
+            request.document = document
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.update_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        # (gapic routing header keyed on the resource ``name`` field.)
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_document(self,
+            request: Optional[Union[document_service_request.DeleteDocumentRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a document. Returns NOT_FOUND if the document does not
+        exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_delete_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteDocumentRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.delete_document(request=request)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.DeleteDocumentRequest, dict]]):
+                The request object. Request message for
+                DocumentService.DeleteDocument.
+            name (:class:`str`):
+                Required. The name of the document to delete. Format:
+                projects/{project_number}/locations/{location}/documents/{document_id}
+                or
+                projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        #   A dict (or None) is coerced into a DeleteDocumentRequest by this constructor.
+        if not isinstance(request, document_service_request.DeleteDocumentRequest):
+            request = document_service_request.DeleteDocumentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        # (gapic routing header keyed on the resource ``name`` field.)
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        # No value is returned; the RPC response (google.protobuf.Empty) is discarded.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def search_documents(self,
+            request: Optional[Union[document_service_request.SearchDocumentsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.SearchDocumentsAsyncPager:
+        r"""Searches for documents using provided
+        [SearchDocumentsRequest][google.cloud.contentwarehouse.v1.SearchDocumentsRequest].
+        This call only returns documents that the caller has permission
+        to search against.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_search_documents():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.SearchDocumentsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.search_documents(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.SearchDocumentsRequest, dict]]):
+                The request object. Request message for
+                DocumentService.SearchDocuments.
+            parent (:class:`str`):
+                Required. The parent, which owns this collection of
+                documents. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.document_service.pagers.SearchDocumentsAsyncPager:
+                Response message for
+                DocumentService.SearchDocuments.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        #   A dict (or None) is coerced into a SearchDocumentsRequest by this constructor.
+        if not isinstance(request, document_service_request.SearchDocumentsRequest):
+            request = document_service_request.SearchDocumentsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.search_documents]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        # (gapic routing header keyed on the resource ``parent`` field.)
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        # The pager re-issues `rpc` with the same retry/timeout/metadata to
+        # fetch subsequent pages on demand.
+        response = pagers.SearchDocumentsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def lock_document(self,
+            request: Optional[Union[document_service_request.LockDocumentRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> gcc_document.Document:
+        r"""Lock the document so the document cannot be updated
+        by other users.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_lock_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.LockDocumentRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.lock_document(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.LockDocumentRequest, dict]]):
+                The request object. Request message for
+                DocumentService.LockDocument.
+            name (:class:`str`):
+                Required. The name of the document to lock. Format:
+                projects/{project_number}/locations/{location}/documents/{document}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.Document:
+                Defines the structure for content
+                warehouse document proto.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        #   A dict (or None) is coerced into a LockDocumentRequest by this constructor.
+        if not isinstance(request, document_service_request.LockDocumentRequest):
+            request = document_service_request.LockDocumentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.lock_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        # (gapic routing header keyed on the resource ``name`` field.)
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def fetch_acl(self,
+            request: Optional[Union[document_service_request.FetchAclRequest, dict]] = None,
+            *,
+            resource: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_service.FetchAclResponse:
+        r"""Gets the access control policy for a resource. Returns NOT_FOUND
+        error if the resource does not exist. Returns an empty policy if
+        the resource exists but does not have a policy set.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_fetch_acl():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.FetchAclRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = await client.fetch_acl(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.FetchAclRequest, dict]]):
+                The request object. Request message for
+                DocumentService.FetchAcl
+            resource (:class:`str`):
+                Required. REQUIRED: The resource for which the policy is
+                being requested. Format for document:
+                projects/{project_number}/locations/{location}/documents/{document_id}.
+                Format for collection:
+                projects/{project_number}/locations/{location}/collections/{collection_id}.
+                Format for project: projects/{project_number}.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.FetchAclResponse:
+                Response message for
+                DocumentService.FetchAcl.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        #   A dict (or None) is coerced into a FetchAclRequest by this constructor.
+        if not isinstance(request, document_service_request.FetchAclRequest):
+            request = document_service_request.FetchAclRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if resource is not None:
+            request.resource = resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.fetch_acl]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        # (gapic routing header keyed on the ``resource`` field.)
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def set_acl(self,
+            request: Optional[Union[document_service_request.SetAclRequest, dict]] = None,
+            *,
+            resource: Optional[str] = None,
+            policy: Optional[policy_pb2.Policy] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_service.SetAclResponse:
+        r"""Sets the access control policy for a resource.
+        Replaces any existing policy.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_set_acl():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.SetAclRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = await client.set_acl(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.SetAclRequest, dict]]):
+                The request object. Request message for
+                DocumentService.SetAcl.
+            resource (:class:`str`):
+                Required. REQUIRED: The resource for which the policy is
+                being requested. Format for document:
+                projects/{project_number}/locations/{location}/documents/{document_id}.
+                Format for collection:
+                projects/{project_number}/locations/{location}/collections/{collection_id}.
+                Format for project: projects/{project_number}.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            policy (:class:`google.iam.v1.policy_pb2.Policy`):
+                Required. REQUIRED: The complete policy to be applied to
+                the ``resource``. The size of the policy is limited to a
+                few 10s of KB. This refers to an Identity and Access
+                (IAM) policy, which specifies access controls for the
+                Document.
+
+                You can set ACL with condition for projects only.
+
+                Supported operators are: ``=``, ``!=``, ``<``, ``<=``,
+                ``>``, and ``>=`` where the left of the operator is
+                ``DocumentSchemaId`` or property name and the right of
+                the operator is a number or a quoted string. You must
+                escape backslash (\) and quote (") characters.
+
+                Boolean expressions (AND/OR) are supported up to 3
+                levels of nesting (for example, "((A AND B AND C) OR D)
+                AND E"), a maximum of 10 comparisons are allowed in the
+                expression. The expression must be < 6000 bytes in
+                length.
+
+                Sample condition:
+                ``"DocumentSchemaId = \"some schema id\" OR SchemaId.floatPropertyName >= 10"``
+
+                This corresponds to the ``policy`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.SetAclResponse:
+                Response message for
+                DocumentService.SetAcl.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource, policy])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_service_request.SetAclRequest):
+            request = document_service_request.SetAclRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if resource is not None:
+            request.resource = resource
+        if policy is not None:
+            request.policy = policy
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.set_acl]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def __aenter__(self) -> "DocumentServiceAsyncClient":
+        # Support ``async with`` usage; the client itself acts as the
+        # asynchronous context manager.
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        # Close the underlying transport when leaving the ``async with`` block.
+        await self.transport.close()
+
+# Default ClientInfo carrying this package's GAPIC version; used when a
+# caller does not supply its own ``client_info``.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+# Explicit public surface of this module.
+__all__ = (
+    "DocumentServiceAsyncClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/client.py
new file mode 100644
index 000000000000..71227e61ca51
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/client.py
@@ -0,0 +1,1580 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+import warnings
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials             # type: ignore
+from google.auth.transport import mtls                            # type: ignore
+from google.auth.transport.grpc import SslCredentials             # type: ignore
+from google.auth.exceptions import MutualTLSChannelError          # type: ignore
+from google.oauth2 import service_account                         # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.document_service import pagers
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.documentai_v1.types import document as gcd_document
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DocumentServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import DocumentServiceGrpcTransport
+from .transports.grpc_asyncio import DocumentServiceGrpcAsyncIOTransport
+from .transports.rest import DocumentServiceRestTransport
+
+
+class DocumentServiceClientMeta(type):
+    """Metaclass for the DocumentService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DocumentServiceTransport]]
+    _transport_registry["grpc"] = DocumentServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = DocumentServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = DocumentServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[DocumentServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DocumentServiceClient(metaclass=DocumentServiceClientMeta):
+    """This service lets you manage document."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "contentwarehouse.googleapis.com"
+    # Derived at class-creation time from DEFAULT_ENDPOINT; ``__func__``
+    # unwraps the staticmethod so it is callable inside the class body.
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    # Endpoint template filled with the resolved universe domain, and the
+    # default ("googleapis.com") universe it falls back to.
+    _DEFAULT_ENDPOINT_TEMPLATE = "contentwarehouse.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DocumentServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DocumentServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DocumentServiceTransport: The transport used by the client
+                instance.
+        """
+        # Set during client initialization (either a caller-provided transport
+        # instance or one constructed from the resolved endpoint/credentials).
+        return self._transport
+
+    @staticmethod
+    def document_path(project: str,location: str,document: str,) -> str:
+        """Returns a fully-qualified document string."""
+        return "projects/{project}/locations/{location}/documents/{document}".format(project=project, location=location, document=document, )
+
+    @staticmethod
+    def parse_document_path(path: str) -> Dict[str,str]:
+        """Parses a document path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/documents/(?P<document>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def document_schema_path(project: str,location: str,document_schema: str,) -> str:
+        """Returns a fully-qualified document_schema string."""
+        return "projects/{project}/locations/{location}/documentSchemas/{document_schema}".format(project=project, location=location, document_schema=document_schema, )
+
+    @staticmethod
+    def parse_document_schema_path(path: str) -> Dict[str,str]:
+        """Parses a document_schema path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/documentSchemas/(?P<document_schema>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def location_path(project: str,location: str,) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_location_path(path: str) -> Dict[str,str]:
+        """Parses a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        if use_cert_flag:
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+            universe_domain (str): The universe domain used by the client.
+            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+                Possible values are "always", "auto", or "never".
+
+        Returns:
+            str: The API endpoint to be used by the client.
+        """
+        if api_override is not None:
+            api_endpoint = api_override
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            _default_universe = DocumentServiceClient._DEFAULT_UNIVERSE
+            if universe_domain != _default_universe:
+                raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
+            api_endpoint = DocumentServiceClient.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = DocumentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
+        return api_endpoint
+
+    @staticmethod
+    def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
+        """Return the universe domain used by the client.
+
+        Args:
+            client_universe_domain (Optional[str]): The universe domain configured via the client options.
+            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+        Returns:
+            str: The universe domain to be used by the client.
+
+        Raises:
+            ValueError: If the universe domain is an empty string.
+        """
+        universe_domain = DocumentServiceClient._DEFAULT_UNIVERSE
+        if client_universe_domain is not None:
+            universe_domain = client_universe_domain
+        elif universe_domain_env is not None:
+            universe_domain = universe_domain_env
+        if len(universe_domain.strip()) == 0:
+            raise ValueError("Universe Domain cannot be an empty string.")
+        return universe_domain
+
+    @staticmethod
+    def _compare_universes(client_universe: str,
+                           credentials: ga_credentials.Credentials) -> bool:
+        """Returns True iff the universe domains used by the client and credentials match.
+
+        Args:
+            client_universe (str): The universe domain configured via the client options.
+            credentials (ga_credentials.Credentials): The credentials being used in the client.
+
+        Returns:
+            bool: True iff client_universe matches the universe in credentials.
+
+        Raises:
+            ValueError: when client_universe does not match the universe in credentials.
+        """
+
+        default_universe = DocumentServiceClient._DEFAULT_UNIVERSE
+        credentials_universe = getattr(credentials, "universe_domain", default_universe)
+
+        if client_universe != credentials_universe:
+            raise ValueError("The configured universe domain "
+                f"({client_universe}) does not match the universe domain "
+                f"found in the credentials ({credentials_universe}). "
+                "If you haven't configured the universe domain explicitly, "
+                f"`{default_universe}` is the default.")
+        return True
+
+    def _validate_universe_domain(self):
+        """Validates client's and credentials' universe domains are consistent.
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+        self._is_universe_domain_valid = (self._is_universe_domain_valid or
+            DocumentServiceClient._compare_universes(self.universe_domain, self.transport._credentials))
+        return self._is_universe_domain_valid
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        # Resolved during __init__ from (in precedence order) the transport
+        # host / client_options.api_endpoint, or the universe-domain template.
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        # Resolved during __init__ from client options or the
+        # GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, defaulting
+        # to "googleapis.com".
+        return self._universe_domain
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, DocumentServiceTransport, Callable[..., DocumentServiceTransport]]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the document service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,DocumentServiceTransport,Callable[..., DocumentServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the DocumentServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DocumentServiceClient._read_environment_variables()
+        self._client_cert_source = DocumentServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = DocumentServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, DocumentServiceTransport)
+        if transport_provided:
+            # transport is a DocumentServiceTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = cast(DocumentServiceTransport, transport)
+            self._api_endpoint = self._transport.host
+
+        self._api_endpoint = (self._api_endpoint or
+            DocumentServiceClient._get_api_endpoint(
+                self._client_options.api_endpoint,
+                self._client_cert_source,
+                self._universe_domain,
+                self._use_mtls_endpoint))
+
+        if not transport_provided:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
+                credentials = google.auth._default.get_api_key_credentials(api_key_value)
+
+            transport_init: Union[Type[DocumentServiceTransport], Callable[..., DocumentServiceTransport]] = (
+                DocumentServiceClient.get_transport_class(transport)
+                if isinstance(transport, str) or transport is None
+                else cast(Callable[..., DocumentServiceTransport], transport)
+            )
+            # initialize with the provided callable or the passed in class
+            self._transport = transport_init(
+                credentials=credentials,
+                credentials_file=self._client_options.credentials_file,
+                host=self._api_endpoint,
+                scopes=self._client_options.scopes,
+                client_cert_source_for_mtls=self._client_cert_source,
+                quota_project_id=self._client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=True,
+                api_audience=self._client_options.api_audience,
+            )
+
+    def create_document(self,
+            request: Optional[Union[document_service_request.CreateDocumentRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            document: Optional[gcc_document.Document] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_service.CreateDocumentResponse:
+        r"""Creates a document.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_create_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceClient()
+
+                # Initialize request argument(s)
+                document = contentwarehouse_v1.Document()
+                document.plain_text = "plain_text_value"
+                document.raw_document_path = "raw_document_path_value"
+                document.display_name = "display_name_value"
+
+                request = contentwarehouse_v1.CreateDocumentRequest(
+                    parent="parent_value",
+                    document=document,
+                )
+
+                # Make the request
+                response = client.create_document(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.CreateDocumentRequest, dict]):
+                The request object. Request message for
+                DocumentService.CreateDocument.
+            parent (str):
+                Required. The parent name. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document (google.cloud.contentwarehouse_v1.types.Document):
+                Required. The document to create.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.CreateDocumentResponse:
+                Response message for
+                DocumentService.CreateDocument.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, document])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_service_request.CreateDocumentRequest):
+            request = document_service_request.CreateDocumentRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if document is not None:
+                request.document = document
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_document(self,
+            request: Optional[Union[document_service_request.GetDocumentRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> gcc_document.Document:
+        r"""Gets a document. Returns NOT_FOUND if the document does not
+        exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_get_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.GetDocumentRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_document(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.GetDocumentRequest, dict]):
+                The request object. Request message for
+                DocumentService.GetDocument.
+            name (str):
+                Required. The name of the document to retrieve. Format:
+                projects/{project_number}/locations/{location}/documents/{document_id}
+                or
+                projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.Document:
+                Defines the structure for content
+                warehouse document proto.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_service_request.GetDocumentRequest):
+            request = document_service_request.GetDocumentRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_document(self,
+            request: Optional[Union[document_service_request.UpdateDocumentRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            document: Optional[gcc_document.Document] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_service.UpdateDocumentResponse:
+        r"""Updates a document. Returns INVALID_ARGUMENT if the name of the
+        document is non-empty and does not equal the existing name.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_update_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceClient()
+
+                # Initialize request argument(s)
+                document = contentwarehouse_v1.Document()
+                document.plain_text = "plain_text_value"
+                document.raw_document_path = "raw_document_path_value"
+                document.display_name = "display_name_value"
+
+                request = contentwarehouse_v1.UpdateDocumentRequest(
+                    name="name_value",
+                    document=document,
+                )
+
+                # Make the request
+                response = client.update_document(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.UpdateDocumentRequest, dict]):
+                The request object. Request message for
+                DocumentService.UpdateDocument.
+            name (str):
+                Required. The name of the document to update. Format:
+                projects/{project_number}/locations/{location}/documents/{document_id}
+                or
+                projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            document (google.cloud.contentwarehouse_v1.types.Document):
+                Required. The document to update.
+                This corresponds to the ``document`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.UpdateDocumentResponse:
+                Response message for
+                DocumentService.UpdateDocument.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, document])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_service_request.UpdateDocumentRequest):
+            request = document_service_request.UpdateDocumentRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+            if document is not None:
+                request.document = document
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_document(self,
+            request: Optional[Union[document_service_request.DeleteDocumentRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a document. Returns NOT_FOUND if the document does not
+        exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_delete_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteDocumentRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_document(request=request)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.DeleteDocumentRequest, dict]):
+                The request object. Request message for
+                DocumentService.DeleteDocument.
+            name (str):
+                Required. The name of the document to delete. Format:
+                projects/{project_number}/locations/{location}/documents/{document_id}
+                or
+                projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_service_request.DeleteDocumentRequest):
+            request = document_service_request.DeleteDocumentRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def search_documents(self,
+            request: Optional[Union[document_service_request.SearchDocumentsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.SearchDocumentsPager:
+        r"""Searches for documents using provided
+        [SearchDocumentsRequest][google.cloud.contentwarehouse.v1.SearchDocumentsRequest].
+        This call only returns documents that the caller has permission
+        to search against.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_search_documents():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.SearchDocumentsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.search_documents(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.SearchDocumentsRequest, dict]):
+                The request object. Request message for
+                DocumentService.SearchDocuments.
+            parent (str):
+                Required. The parent, which owns this collection of
+                documents. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.document_service.pagers.SearchDocumentsPager:
+                Response message for
+                DocumentService.SearchDocuments.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_service_request.SearchDocumentsRequest):
+            request = document_service_request.SearchDocumentsRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.search_documents]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.SearchDocumentsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def lock_document(self,
+            request: Optional[Union[document_service_request.LockDocumentRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> gcc_document.Document:
+        r"""Lock the document so the document cannot be updated
+        by other users.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_lock_document():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.LockDocumentRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.lock_document(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.LockDocumentRequest, dict]):
+                The request object. Request message for
+                DocumentService.LockDocument.
+            name (str):
+                Required. The name of the document to lock. Format:
+                projects/{project_number}/locations/{location}/documents/{document}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.Document:
+                Defines the structure for content
+                warehouse document proto.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, document_service_request.LockDocumentRequest):
+            request = document_service_request.LockDocumentRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.lock_document]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def fetch_acl(self,
+            request: Optional[Union[document_service_request.FetchAclRequest, dict]] = None,
+            *,
+            resource: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_service.FetchAclResponse:
+        r"""Gets the access control policy for a resource. Returns NOT_FOUND
+        error if the resource does not exist. Returns an empty policy if
+        the resource exists but does not have a policy set.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_fetch_acl():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.FetchAclRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = client.fetch_acl(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.FetchAclRequest, dict]):
+                The request object. Request message for
+                DocumentService.FetchAcl
+            resource (str):
+                Required. REQUIRED: The resource for which the policy is
+                being requested. Format for document:
+                projects/{project_number}/locations/{location}/documents/{document_id}.
+                Format for collection:
+                projects/{project_number}/locations/{location}/collections/{collection_id}.
+                Format for project: projects/{project_number}.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.FetchAclResponse:
+                Response message for
+                DocumentService.FetchAcl.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Coerce the request into a proto-plus FetchAclRequest if it is not
+        #   one already (e.g. a dict), then apply any flattened keyword
+        #   arguments to the newly constructed object.
+        if not isinstance(request, document_service_request.FetchAclRequest):
+            request = document_service_request.FetchAclRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if resource is not None:
+                request.resource = resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.fetch_acl]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.  The ``resource`` routing header lets the backend
+        # route the request to the appropriate service instance.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def set_acl(self,
+            request: Optional[Union[document_service_request.SetAclRequest, dict]] = None,
+            *,
+            resource: Optional[str] = None,
+            policy: Optional[policy_pb2.Policy] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> document_service.SetAclResponse:
+        r"""Sets the access control policy for a resource.
+        Replaces any existing policy.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_set_acl():
+                # Create a client
+                client = contentwarehouse_v1.DocumentServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.SetAclRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = client.set_acl(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.SetAclRequest, dict]):
+                The request object. Request message for
+                DocumentService.SetAcl.
+            resource (str):
+                Required. REQUIRED: The resource for which the policy is
+                being requested. Format for document:
+                projects/{project_number}/locations/{location}/documents/{document_id}.
+                Format for collection:
+                projects/{project_number}/locations/{location}/collections/{collection_id}.
+                Format for project: projects/{project_number}.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            policy (google.iam.v1.policy_pb2.Policy):
+                Required. REQUIRED: The complete policy to be applied to
+                the ``resource``. The size of the policy is limited to a
+                few 10s of KB. This refers to an Identity and Access
+                Management (IAM) policy, which specifies access controls
+                for the Document.
+
+                You can set ACL with condition for projects only.
+
+                Supported operators are: ``=``, ``!=``, ``<``, ``<=``,
+                ``>``, and ``>=`` where the left of the operator is
+                ``DocumentSchemaId`` or property name and the right of
+                the operator is a number or a quoted string. You must
+                escape backslash (\) and quote (") characters.
+
+                Boolean expressions (AND/OR) are supported up to 3
+                levels of nesting (for example, "((A AND B AND C) OR D)
+                AND E"), a maximum of 10 comparisons are allowed in the
+                expression. The expression must be < 6000 bytes in
+                length.
+
+                Sample condition:
+                ``"DocumentSchemaId = \"some schema id\" OR SchemaId.floatPropertyName >= 10"``
+
+                This corresponds to the ``policy`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.SetAclResponse:
+                Response message for
+                DocumentService.SetAcl.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource, policy])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Coerce the request into a proto-plus SetAclRequest if it is not
+        #   one already (e.g. a dict), then apply any flattened keyword
+        #   arguments to the newly constructed object.
+        if not isinstance(request, document_service_request.SetAclRequest):
+            request = document_service_request.SetAclRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if resource is not None:
+                request.resource = resource
+            if policy is not None:
+                request.policy = policy
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.set_acl]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.  The ``resource`` routing header lets the backend
+        # route the request to the appropriate service instance.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "DocumentServiceClient":
+        """Returns the client itself for use as a context manager."""
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+        """
+        # `type` shadows the builtin, but the parameter names here follow the
+        # context-manager protocol signature and cannot be changed safely.
+        self.transport.close()
+
+    def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # NOTE(review): if ``request`` is None it falls through unconverted and
+        # the ``request.name`` access below raises AttributeError — callers are
+        # expected to always supply a request despite the Optional default.
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+
+
+
+
+
+
+
+# Client info advertising the generated package version to the API frontend.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "DocumentServiceClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/pagers.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/pagers.py
new file mode 100644
index 000000000000..5c2b1157b29c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/pagers.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
+# Older versions of google-api-core do not define ``_MethodDefault``; fall
+# back to plain ``object`` so these aliases still resolve at import time.
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+
+
+class SearchDocumentsPager:
+    """A pager for iterating through ``search_documents`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.contentwarehouse_v1.types.SearchDocumentsResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``matching_documents`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``SearchDocuments`` requests and continue to iterate
+    through the ``matching_documents`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.contentwarehouse_v1.types.SearchDocumentsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., document_service.SearchDocumentsResponse],
+            request: document_service_request.SearchDocumentsRequest,
+            response: document_service.SearchDocumentsResponse,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.contentwarehouse_v1.types.SearchDocumentsRequest):
+                The initial request object.
+            response (google.cloud.contentwarehouse_v1.types.SearchDocumentsResponse):
+                The initial response object.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        # Re-wrap the request so mutating ``page_token`` below does not
+        # modify the caller's object.
+        self._request = document_service_request.SearchDocumentsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        """Delegates unknown attribute lookups to the most recent response."""
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[document_service.SearchDocumentsResponse]:
+        """Lazily fetches each page, advancing ``page_token`` between calls."""
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[document_service.SearchDocumentsResponse.MatchingDocument]:
+        """Iterates over every matching document across all pages."""
+        for page in self.pages:
+            yield from page.matching_documents
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class SearchDocumentsAsyncPager:
+    """A pager for iterating through ``search_documents`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.contentwarehouse_v1.types.SearchDocumentsResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``matching_documents`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``SearchDocuments`` requests and continue to iterate
+    through the ``matching_documents`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.contentwarehouse_v1.types.SearchDocumentsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[document_service.SearchDocumentsResponse]],
+            request: document_service_request.SearchDocumentsRequest,
+            response: document_service.SearchDocumentsResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.contentwarehouse_v1.types.SearchDocumentsRequest):
+                The initial request object.
+            response (google.cloud.contentwarehouse_v1.types.SearchDocumentsResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        # Re-wrap the request so mutating ``page_token`` below does not
+        # modify the caller's object.
+        self._request = document_service_request.SearchDocumentsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        """Delegates unknown attribute lookups to the most recent response."""
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[document_service.SearchDocumentsResponse]:
+        """Lazily fetches each page, advancing ``page_token`` between calls."""
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+    def __aiter__(self) -> AsyncIterator[document_service.SearchDocumentsResponse.MatchingDocument]:
+        """Asynchronously iterates over matching documents across all pages."""
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.matching_documents:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/README.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/README.rst
new file mode 100644
index 000000000000..4aabee4652ef
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`DocumentServiceTransport` is the ABC for all transports.
+- public child `DocumentServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `DocumentServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseDocumentServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `DocumentServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/__init__.py
new file mode 100644
index 000000000000..7412085409de
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/__init__.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import DocumentServiceTransport
+from .grpc import DocumentServiceGrpcTransport
+from .grpc_asyncio import DocumentServiceGrpcAsyncIOTransport
+from .rest import DocumentServiceRestTransport
+from .rest import DocumentServiceRestInterceptor
+
+
+# Compile a registry of transports.
+# Keys correspond to the values accepted by the client's ``transport``
+# constructor argument.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[DocumentServiceTransport]]
+_transport_registry['grpc'] = DocumentServiceGrpcTransport
+_transport_registry['grpc_asyncio'] = DocumentServiceGrpcAsyncIOTransport
+_transport_registry['rest'] = DocumentServiceRestTransport
+
+__all__ = (
+    'DocumentServiceTransport',
+    'DocumentServiceGrpcTransport',
+    'DocumentServiceGrpcAsyncIOTransport',
+    'DocumentServiceRestTransport',
+    'DocumentServiceRestInterceptor',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py
new file mode 100644
index 000000000000..fcbf2625ea45
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/base.py
@@ -0,0 +1,288 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+# Default client info advertises this GAPIC package's version in the
+# user-agent metadata sent with API requests.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class DocumentServiceTransport(abc.ABC):
+    """Abstract transport class for DocumentService."""
+
+    # OAuth scopes used when default credentials are resolved from the
+    # environment or from a credentials file.
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    # Default API endpoint; ':443' is appended in __init__ when the caller
+    # supplies a host without an explicit port.
+    DEFAULT_HOST: str = 'contentwarehouse.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            api_audience (Optional[str]): Audience applied via
+                ``with_gdch_audience`` when the resolved default credentials
+                support it; falls back to ``host`` when unset.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        # Subclasses may pre-set _ignore_credentials (e.g. when a channel is
+        # passed in) before calling this initializer.
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply audience if the credentials file passed from user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        # The hostname (including port) this transport connects to.
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        """Precompute retry- and timeout-wrapped callables for each RPC."""
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.create_document: gapic_v1.method.wrap_method(
+                self.create_document,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.get_document: gapic_v1.method.wrap_method(
+                self.get_document,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_document: gapic_v1.method.wrap_method(
+                self.update_document,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.delete_document: gapic_v1.method.wrap_method(
+                self.delete_document,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.search_documents: gapic_v1.method.wrap_method(
+                self.search_documents,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.lock_document: gapic_v1.method.wrap_method(
+                self.lock_document,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.fetch_acl: gapic_v1.method.wrap_method(
+                self.fetch_acl,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.set_acl: gapic_v1.method.wrap_method(
+                self.set_acl,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_operation: gapic_v1.method.wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+         }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    # Each RPC is exposed as a property returning the transport-specific
+    # callable; concrete subclasses (gRPC, REST) override these.
+    @property
+    def create_document(self) -> Callable[
+            [document_service_request.CreateDocumentRequest],
+            Union[
+                document_service.CreateDocumentResponse,
+                Awaitable[document_service.CreateDocumentResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_document(self) -> Callable[
+            [document_service_request.GetDocumentRequest],
+            Union[
+                gcc_document.Document,
+                Awaitable[gcc_document.Document]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def update_document(self) -> Callable[
+            [document_service_request.UpdateDocumentRequest],
+            Union[
+                document_service.UpdateDocumentResponse,
+                Awaitable[document_service.UpdateDocumentResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def delete_document(self) -> Callable[
+            [document_service_request.DeleteDocumentRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def search_documents(self) -> Callable[
+            [document_service_request.SearchDocumentsRequest],
+            Union[
+                document_service.SearchDocumentsResponse,
+                Awaitable[document_service.SearchDocumentsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def lock_document(self) -> Callable[
+            [document_service_request.LockDocumentRequest],
+            Union[
+                gcc_document.Document,
+                Awaitable[gcc_document.Document]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def fetch_acl(self) -> Callable[
+            [document_service_request.FetchAclRequest],
+            Union[
+                document_service.FetchAclResponse,
+                Awaitable[document_service.FetchAclResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def set_acl(self) -> Callable[
+            [document_service_request.SetAclRequest],
+            Union[
+                document_service.SetAclResponse,
+                Awaitable[document_service.SetAclResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[
+        [operations_pb2.GetOperationRequest],
+        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'DocumentServiceTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py
new file mode 100644
index 000000000000..c4de3fba2c00
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc.py
@@ -0,0 +1,484 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth                         # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DocumentServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class DocumentServiceGrpcTransport(DocumentServiceTransport):
+    """gRPC backend transport for DocumentService.
+
+    This service lets you manage document.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    # Cache of gRPC stub callables, keyed by method name; populated lazily
+    # by the RPC properties below.
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, grpc.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_document(self) -> Callable[
+            [document_service_request.CreateDocumentRequest],
+            document_service.CreateDocumentResponse]:
+        r"""Return a callable for the create document method over gRPC.
+
+        Creates a document.
+
+        Returns:
+            Callable[[~.CreateDocumentRequest],
+                    ~.CreateDocumentResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_document' not in self._stubs:
+            self._stubs['create_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/CreateDocument',
+                request_serializer=document_service_request.CreateDocumentRequest.serialize,
+                response_deserializer=document_service.CreateDocumentResponse.deserialize,
+            )
+        return self._stubs['create_document']
+
+    @property
+    def get_document(self) -> Callable[
+            [document_service_request.GetDocumentRequest],
+            gcc_document.Document]:
+        r"""Return a callable for the get document method over gRPC.
+
+        Gets a document. Returns NOT_FOUND if the document does not
+        exist.
+
+        Returns:
+            Callable[[~.GetDocumentRequest],
+                    ~.Document]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_document' not in self._stubs:
+            self._stubs['get_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/GetDocument',
+                request_serializer=document_service_request.GetDocumentRequest.serialize,
+                response_deserializer=gcc_document.Document.deserialize,
+            )
+        return self._stubs['get_document']
+
+    @property
+    def update_document(self) -> Callable[
+            [document_service_request.UpdateDocumentRequest],
+            document_service.UpdateDocumentResponse]:
+        r"""Return a callable for the update document method over gRPC.
+
+        Updates a document. Returns INVALID_ARGUMENT if the name of the
+        document is non-empty and does not equal the existing name.
+
+        Returns:
+            Callable[[~.UpdateDocumentRequest],
+                    ~.UpdateDocumentResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_document' not in self._stubs:
+            self._stubs['update_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/UpdateDocument',
+                request_serializer=document_service_request.UpdateDocumentRequest.serialize,
+                response_deserializer=document_service.UpdateDocumentResponse.deserialize,
+            )
+        return self._stubs['update_document']
+
+    @property
+    def delete_document(self) -> Callable[
+            [document_service_request.DeleteDocumentRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete document method over gRPC.
+
+        Deletes a document. Returns NOT_FOUND if the document does not
+        exist.
+
+        Returns:
+            Callable[[~.DeleteDocumentRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_document' not in self._stubs:
+            self._stubs['delete_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/DeleteDocument',
+                request_serializer=document_service_request.DeleteDocumentRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_document']
+
+    @property
+    def search_documents(self) -> Callable[
+            [document_service_request.SearchDocumentsRequest],
+            document_service.SearchDocumentsResponse]:
+        r"""Return a callable for the search documents method over gRPC.
+
+        Searches for documents using provided
+        [SearchDocumentsRequest][google.cloud.contentwarehouse.v1.SearchDocumentsRequest].
+        This call only returns documents that the caller has permission
+        to search against.
+
+        Returns:
+            Callable[[~.SearchDocumentsRequest],
+                    ~.SearchDocumentsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'search_documents' not in self._stubs:
+            self._stubs['search_documents'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/SearchDocuments',
+                request_serializer=document_service_request.SearchDocumentsRequest.serialize,
+                response_deserializer=document_service.SearchDocumentsResponse.deserialize,
+            )
+        return self._stubs['search_documents']
+
+    @property
+    def lock_document(self) -> Callable[
+            [document_service_request.LockDocumentRequest],
+            gcc_document.Document]:
+        r"""Return a callable for the lock document method over gRPC.
+
+        Lock the document so the document cannot be updated
+        by other users.
+
+        Returns:
+            Callable[[~.LockDocumentRequest],
+                    ~.Document]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'lock_document' not in self._stubs:
+            self._stubs['lock_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/LockDocument',
+                request_serializer=document_service_request.LockDocumentRequest.serialize,
+                response_deserializer=gcc_document.Document.deserialize,
+            )
+        return self._stubs['lock_document']
+
+    @property
+    def fetch_acl(self) -> Callable[
+            [document_service_request.FetchAclRequest],
+            document_service.FetchAclResponse]:
+        r"""Return a callable for the fetch acl method over gRPC.
+
+        Gets the access control policy for a resource. Returns NOT_FOUND
+        error if the resource does not exist. Returns an empty policy if
+        the resource exists but does not have a policy set.
+
+        Returns:
+            Callable[[~.FetchAclRequest],
+                    ~.FetchAclResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'fetch_acl' not in self._stubs:
+            self._stubs['fetch_acl'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/FetchAcl',
+                request_serializer=document_service_request.FetchAclRequest.serialize,
+                response_deserializer=document_service.FetchAclResponse.deserialize,
+            )
+        return self._stubs['fetch_acl']
+
+    @property
+    def set_acl(self) -> Callable[
+            [document_service_request.SetAclRequest],
+            document_service.SetAclResponse]:
+        r"""Return a callable for the set acl method over gRPC.
+
+        Sets the access control policy for a resource.
+        Replaces any existing policy.
+
+        Returns:
+            Callable[[~.SetAclRequest],
+                    ~.SetAclResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'set_acl' not in self._stubs:
+            self._stubs['set_acl'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/SetAcl',
+                request_serializer=document_service_request.SetAclRequest.serialize,
+                response_deserializer=document_service.SetAclResponse.deserialize,
+            )
+        return self._stubs['set_acl']
+
+    def close(self):
+        """Close the underlying gRPC channel."""
+        self.grpc_channel.close()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def kind(self) -> str:
+        # Transport name used for registry lookup and diagnostics.
+        return "grpc"
+
+
+__all__ = (
+    'DocumentServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..9d4840d84bb1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/grpc_asyncio.py
@@ -0,0 +1,563 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc                        # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import DocumentServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import DocumentServiceGrpcTransport
+
+
+class DocumentServiceGrpcAsyncIOTransport(DocumentServiceTransport):
+    """gRPC AsyncIO backend transport for DocumentService.
+
+    This service lets you manage document.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    # The asyncio channel all RPCs travel over; assigned in ``__init__``.
+    _grpc_channel: aio.Channel
+    # Cache of RPC stubs keyed by method name. The class-level default is
+    # shadowed by a fresh per-instance dict in ``__init__``, so instances
+    # never share this mapping.
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        # Service-wide defaults (auth scopes and host) come from constants on
+        # the base transport class; extra kwargs flow through to channel setup.
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            api_audience (Optional[str]): Forwarded unchanged to the base
+                transport; presumably the intended audience for the
+                credentials — confirm against ``DocumentServiceTransport``.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        # Fresh per-instance stub cache; shadows the class-level attribute.
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Return the channel used to connect to this service.
+
+        The channel itself is created in ``__init__``; this property only
+        returns the cached instance, so repeated calls yield the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def create_document(self) -> Callable[
+            [document_service_request.CreateDocumentRequest],
+            Awaitable[document_service.CreateDocumentResponse]]:
+        r"""Return a callable for the create document method over gRPC.
+
+        Creates a document.
+
+        Returns:
+            Callable[[~.CreateDocumentRequest],
+                    Awaitable[~.CreateDocumentResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_document' not in self._stubs:
+            self._stubs['create_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/CreateDocument',
+                request_serializer=document_service_request.CreateDocumentRequest.serialize,
+                response_deserializer=document_service.CreateDocumentResponse.deserialize,
+            )
+        return self._stubs['create_document']
+
+    @property
+    def get_document(self) -> Callable[
+            [document_service_request.GetDocumentRequest],
+            Awaitable[gcc_document.Document]]:
+        r"""Return a callable for the get document method over gRPC.
+
+        Gets a document. Returns NOT_FOUND if the document does not
+        exist.
+
+        Returns:
+            Callable[[~.GetDocumentRequest],
+                    Awaitable[~.Document]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_document' not in self._stubs:
+            self._stubs['get_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/GetDocument',
+                request_serializer=document_service_request.GetDocumentRequest.serialize,
+                response_deserializer=gcc_document.Document.deserialize,
+            )
+        return self._stubs['get_document']
+
+    @property
+    def update_document(self) -> Callable[
+            [document_service_request.UpdateDocumentRequest],
+            Awaitable[document_service.UpdateDocumentResponse]]:
+        r"""Return a callable for the update document method over gRPC.
+
+        Updates a document. Returns INVALID_ARGUMENT if the name of the
+        document is non-empty and does not equal the existing name.
+
+        Returns:
+            Callable[[~.UpdateDocumentRequest],
+                    Awaitable[~.UpdateDocumentResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_document' not in self._stubs:
+            self._stubs['update_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/UpdateDocument',
+                request_serializer=document_service_request.UpdateDocumentRequest.serialize,
+                response_deserializer=document_service.UpdateDocumentResponse.deserialize,
+            )
+        return self._stubs['update_document']
+
+    @property
+    def delete_document(self) -> Callable[
+            [document_service_request.DeleteDocumentRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete document method over gRPC.
+
+        Deletes a document. Returns NOT_FOUND if the document does not
+        exist.
+
+        Returns:
+            Callable[[~.DeleteDocumentRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_document' not in self._stubs:
+            self._stubs['delete_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/DeleteDocument',
+                request_serializer=document_service_request.DeleteDocumentRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_document']
+
+    @property
+    def search_documents(self) -> Callable[
+            [document_service_request.SearchDocumentsRequest],
+            Awaitable[document_service.SearchDocumentsResponse]]:
+        r"""Return a callable for the search documents method over gRPC.
+
+        Searches for documents using provided
+        [SearchDocumentsRequest][google.cloud.contentwarehouse.v1.SearchDocumentsRequest].
+        This call only returns documents that the caller has permission
+        to search against.
+
+        Returns:
+            Callable[[~.SearchDocumentsRequest],
+                    Awaitable[~.SearchDocumentsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'search_documents' not in self._stubs:
+            self._stubs['search_documents'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/SearchDocuments',
+                request_serializer=document_service_request.SearchDocumentsRequest.serialize,
+                response_deserializer=document_service.SearchDocumentsResponse.deserialize,
+            )
+        return self._stubs['search_documents']
+
+    @property
+    def lock_document(self) -> Callable[
+            [document_service_request.LockDocumentRequest],
+            Awaitable[gcc_document.Document]]:
+        r"""Return a callable for the lock document method over gRPC.
+
+        Lock the document so the document cannot be updated
+        by other users.
+
+        Returns:
+            Callable[[~.LockDocumentRequest],
+                    Awaitable[~.Document]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'lock_document' not in self._stubs:
+            self._stubs['lock_document'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/LockDocument',
+                request_serializer=document_service_request.LockDocumentRequest.serialize,
+                response_deserializer=gcc_document.Document.deserialize,
+            )
+        return self._stubs['lock_document']
+
+    @property
+    def fetch_acl(self) -> Callable[
+            [document_service_request.FetchAclRequest],
+            Awaitable[document_service.FetchAclResponse]]:
+        r"""Return a callable for the fetch acl method over gRPC.
+
+        Gets the access control policy for a resource. Returns NOT_FOUND
+        error if the resource does not exist. Returns an empty policy if
+        the resource exists but does not have a policy set.
+
+        Returns:
+            Callable[[~.FetchAclRequest],
+                    Awaitable[~.FetchAclResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'fetch_acl' not in self._stubs:
+            self._stubs['fetch_acl'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/FetchAcl',
+                request_serializer=document_service_request.FetchAclRequest.serialize,
+                response_deserializer=document_service.FetchAclResponse.deserialize,
+            )
+        return self._stubs['fetch_acl']
+
+    @property
+    def set_acl(self) -> Callable[
+            [document_service_request.SetAclRequest],
+            Awaitable[document_service.SetAclResponse]]:
+        r"""Return a callable for the set acl method over gRPC.
+
+        Sets the access control policy for a resource.
+        Replaces any existing policy.
+
+        Returns:
+            Callable[[~.SetAclRequest],
+                    Awaitable[~.SetAclResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'set_acl' not in self._stubs:
+            self._stubs['set_acl'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.DocumentService/SetAcl',
+                request_serializer=document_service_request.SetAclRequest.serialize,
+                response_deserializer=document_service.SetAclResponse.deserialize,
+            )
+        return self._stubs['set_acl']
+
+    def _prep_wrapped_messages(self, client_info):
+        """Precompute the wrapped methods, overriding the base class method
+        to use async wrappers.
+
+        The retry/timeout values mirror the service configuration: the
+        read-style calls ``get_document`` and ``fetch_acl`` retry on
+        ``ServiceUnavailable``; the remaining methods get a flat timeout only.
+        """
+        self._wrapped_methods = {
+            self.create_document: self._wrap_method(
+                self.create_document,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.get_document: self._wrap_method(
+                self.get_document,
+                default_retry=retries.AsyncRetry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_document: self._wrap_method(
+                self.update_document,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.delete_document: self._wrap_method(
+                self.delete_document,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.search_documents: self._wrap_method(
+                self.search_documents,
+                default_timeout=180.0,
+                client_info=client_info,
+            ),
+            self.lock_document: self._wrap_method(
+                self.lock_document,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.fetch_acl: self._wrap_method(
+                self.fetch_acl,
+                default_retry=retries.AsyncRetry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.set_acl: self._wrap_method(
+                self.set_acl,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_operation: self._wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def _wrap_method(self, func, *args, **kwargs):
+        """Wrap ``func`` with ``gapic_v1.method_async.wrap_method``.
+
+        Newer api_core releases accept a ``kind`` argument on ``wrap_method``;
+        it is only forwarded when supported (feature-detected in ``__init__``
+        via ``self._wrap_with_kind``).
+        """
+        if self._wrap_with_kind:  # pragma: NO COVER
+            kwargs["kind"] = self.kind
+        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
+    def close(self):
+        """Close the underlying gRPC channel.
+
+        The result of ``grpc_channel.close()`` is returned so async callers
+        can await it (aio channels close asynchronously).
+        """
+        return self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        """Return the transport kind identifier ("grpc_asyncio")."""
+        return "grpc_asyncio"
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+
+# Explicit public API of this module.
+__all__ = (
+    'DocumentServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py
new file mode 100644
index 000000000000..6ed41f103253
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py
@@ -0,0 +1,1119 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+import json  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+
+from requests import __version__ as requests_version
+import dataclasses
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+from .rest_base import _BaseDocumentServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+# ``_MethodDefault`` only exists in newer google-api-core releases; fall back
+# to ``object`` so the alias still resolves on older versions.
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+
+# Default client info advertises the REST transport (requests version) in the
+# user-agent; grpc_version is None because this module is REST-only.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=f"requests@{requests_version}",
+)
+
+
+class DocumentServiceRestInterceptor:
+    """Interceptor for DocumentService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the DocumentServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor):
+            def pre_create_document(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_document(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_delete_document(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def pre_fetch_acl(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_fetch_acl(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_get_document(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_document(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_lock_document(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_lock_document(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_search_documents(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_search_documents(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_set_acl(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_set_acl(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_update_document(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_document(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = DocumentServiceRestTransport(interceptor=MyCustomDocumentServiceInterceptor())
+        client = DocumentServiceClient(transport=transport)
+
+
+    """
+    # Each hook below is a no-op default: pre_* returns (request, metadata)
+    # unchanged, post_* returns the response unchanged. Subclasses override
+    # only the hooks they care about.
+    def pre_create_document(self, request: document_service_request.CreateDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_service_request.CreateDocumentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_document
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_create_document(self, response: document_service.CreateDocumentResponse) -> document_service.CreateDocumentResponse:
+        """Post-rpc interceptor for create_document
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_document(self, request: document_service_request.DeleteDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_service_request.DeleteDocumentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_document
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def pre_fetch_acl(self, request: document_service_request.FetchAclRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_service_request.FetchAclRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for fetch_acl
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_fetch_acl(self, response: document_service.FetchAclResponse) -> document_service.FetchAclResponse:
+        """Post-rpc interceptor for fetch_acl
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_get_document(self, request: document_service_request.GetDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_service_request.GetDocumentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_document
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_get_document(self, response: gcc_document.Document) -> gcc_document.Document:
+        """Post-rpc interceptor for get_document
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_lock_document(self, request: document_service_request.LockDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_service_request.LockDocumentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for lock_document
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_lock_document(self, response: gcc_document.Document) -> gcc_document.Document:
+        """Post-rpc interceptor for lock_document
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_search_documents(self, request: document_service_request.SearchDocumentsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_service_request.SearchDocumentsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for search_documents
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_search_documents(self, response: document_service.SearchDocumentsResponse) -> document_service.SearchDocumentsResponse:
+        """Post-rpc interceptor for search_documents
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_set_acl(self, request: document_service_request.SetAclRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_service_request.SetAclRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for set_acl
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_set_acl(self, response: document_service.SetAclResponse) -> document_service.SetAclResponse:
+        """Post-rpc interceptor for set_acl
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_update_document(self, request: document_service_request.UpdateDocumentRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[document_service_request.UpdateDocumentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_document
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_update_document(self, response: document_service.UpdateDocumentResponse) -> document_service.UpdateDocumentResponse:
+        """Post-rpc interceptor for update_document
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+    # Hooks for the google.longrunning Operations mixin exposed by this service.
+    def pre_get_operation(
+        self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the DocumentService server.
+        """
+        return request, metadata
+
+    def post_get_operation(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the DocumentService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class DocumentServiceRestStub:
+    # Per-RPC state shared with each nested method class via multiple
+    # inheritance: the authorized HTTP session, target host, and interceptor.
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: DocumentServiceRestInterceptor
+
+
+class DocumentServiceRestTransport(_BaseDocumentServiceRestTransport):
+    """REST backend synchronous transport for DocumentService.
+
+    This service lets you manage document.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[DocumentServiceRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        # All REST calls go through one AuthorizedSession, which refreshes
+        # credentials automatically; mTLS is layered on when a cert source is
+        # supplied. A no-op interceptor is installed when none is given.
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DocumentServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateDocument(_BaseDocumentServiceRestTransport._BaseCreateDocument, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.CreateDocument")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch the HTTP verb named in the transcoded request via the
+            # authorized session; metadata pairs become HTTP headers.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_service_request.CreateDocumentRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_service.CreateDocumentResponse:
+            r"""Call the create document method over HTTP.
+
+            Args:
+                request (~.document_service_request.CreateDocumentRequest):
+                    The request object. Request message for
+                DocumentService.CreateDocument.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_service.CreateDocumentResponse:
+                    Response message for
+                DocumentService.CreateDocument.
+
+            """
+
+            # NOTE(review): ``retry`` is accepted but not applied in this body;
+            # presumably retries are handled by the gapic method wrapper — TODO confirm.
+            http_options = _BaseDocumentServiceRestTransport._BaseCreateDocument._get_http_options()
+            request, metadata = self._interceptor.pre_create_document(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseCreateDocument._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentServiceRestTransport._BaseCreateDocument._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseCreateDocument._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._CreateDocument._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_service.CreateDocumentResponse()
+            pb_resp = document_service.CreateDocumentResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_create_document(resp)
+            return resp
+
+    class _DeleteDocument(_BaseDocumentServiceRestTransport._BaseDeleteDocument, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.DeleteDocument")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch the HTTP verb named in the transcoded request via the
+            # authorized session; metadata pairs become HTTP headers.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_service_request.DeleteDocumentRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ):
+            r"""Call the delete document method over HTTP.
+
+            Args:
+                request (~.document_service_request.DeleteDocumentRequest):
+                    The request object. Request message for
+                DocumentService.DeleteDocument.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+            """
+
+            http_options = _BaseDocumentServiceRestTransport._BaseDeleteDocument._get_http_options()
+            request, metadata = self._interceptor.pre_delete_document(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseDeleteDocument._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentServiceRestTransport._BaseDeleteDocument._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseDeleteDocument._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._DeleteDocument._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+            # No response body is parsed: DeleteDocument returns Empty, so the
+            # method implicitly returns None on success.
+
+    class _FetchAcl(_BaseDocumentServiceRestTransport._BaseFetchAcl, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.FetchAcl")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch the HTTP verb named in the transcoded request via the
+            # authorized session; metadata pairs become HTTP headers.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_service_request.FetchAclRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_service.FetchAclResponse:
+            r"""Call the fetch acl method over HTTP.
+
+            Args:
+                request (~.document_service_request.FetchAclRequest):
+                    The request object. Request message for
+                DocumentService.FetchAcl
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_service.FetchAclResponse:
+                    Response message for
+                DocumentService.FetchAcl.
+
+            """
+
+            http_options = _BaseDocumentServiceRestTransport._BaseFetchAcl._get_http_options()
+            request, metadata = self._interceptor.pre_fetch_acl(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseFetchAcl._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentServiceRestTransport._BaseFetchAcl._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseFetchAcl._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._FetchAcl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_service.FetchAclResponse()
+            pb_resp = document_service.FetchAclResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_fetch_acl(resp)
+            return resp
+
+    class _GetDocument(_BaseDocumentServiceRestTransport._BaseGetDocument, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.GetDocument")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch the HTTP verb named in the transcoded request via the
+            # authorized session; metadata pairs become HTTP headers.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_service_request.GetDocumentRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> gcc_document.Document:
+            r"""Call the get document method over HTTP.
+
+            Args:
+                request (~.document_service_request.GetDocumentRequest):
+                    The request object. Request message for
+                DocumentService.GetDocument.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcc_document.Document:
+                    Defines the structure for content
+                warehouse document proto.
+
+            """
+
+            http_options = _BaseDocumentServiceRestTransport._BaseGetDocument._get_http_options()
+            request, metadata = self._interceptor.pre_get_document(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseGetDocument._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentServiceRestTransport._BaseGetDocument._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseGetDocument._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._GetDocument._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = gcc_document.Document()
+            pb_resp = gcc_document.Document.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_get_document(resp)
+            return resp
+
+    class _LockDocument(_BaseDocumentServiceRestTransport._BaseLockDocument, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.LockDocument")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch the HTTP verb named in the transcoded request via the
+            # authorized session; metadata pairs become HTTP headers.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_service_request.LockDocumentRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> gcc_document.Document:
+            r"""Call the lock document method over HTTP.
+
+            Args:
+                request (~.document_service_request.LockDocumentRequest):
+                    The request object. Request message for
+                DocumentService.LockDocument.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcc_document.Document:
+                    Defines the structure for content
+                warehouse document proto.
+
+            """
+
+            http_options = _BaseDocumentServiceRestTransport._BaseLockDocument._get_http_options()
+            request, metadata = self._interceptor.pre_lock_document(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseLockDocument._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentServiceRestTransport._BaseLockDocument._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseLockDocument._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._LockDocument._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = gcc_document.Document()
+            pb_resp = gcc_document.Document.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_lock_document(resp)
+            return resp
+
+    class _SearchDocuments(_BaseDocumentServiceRestTransport._BaseSearchDocuments, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.SearchDocuments")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_service_request.SearchDocumentsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_service.SearchDocumentsResponse:
+            r"""Call the search documents method over HTTP.
+
+            Args:
+                request (~.document_service_request.SearchDocumentsRequest):
+                    The request object. Request message for
+                DocumentService.SearchDocuments.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_service.SearchDocumentsResponse:
+                    Response message for
+                DocumentService.SearchDocuments.
+
+            """
+
+            http_options = _BaseDocumentServiceRestTransport._BaseSearchDocuments._get_http_options()
+            request, metadata = self._interceptor.pre_search_documents(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseSearchDocuments._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentServiceRestTransport._BaseSearchDocuments._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseSearchDocuments._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._SearchDocuments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_service.SearchDocumentsResponse()
+            pb_resp = document_service.SearchDocumentsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_search_documents(resp)
+            return resp
+
+    class _SetAcl(_BaseDocumentServiceRestTransport._BaseSetAcl, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.SetAcl")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_service_request.SetAclRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_service.SetAclResponse:
+            r"""Call the set acl method over HTTP.
+
+            Args:
+                request (~.document_service_request.SetAclRequest):
+                    The request object. Request message for
+                DocumentService.SetAcl.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_service.SetAclResponse:
+                    Response message for
+                DocumentService.SetAcl.
+
+            """
+
+            http_options = _BaseDocumentServiceRestTransport._BaseSetAcl._get_http_options()
+            request, metadata = self._interceptor.pre_set_acl(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseSetAcl._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentServiceRestTransport._BaseSetAcl._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseSetAcl._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._SetAcl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_service.SetAclResponse()
+            pb_resp = document_service.SetAclResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_set_acl(resp)
+            return resp
+
+    class _UpdateDocument(_BaseDocumentServiceRestTransport._BaseUpdateDocument, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.UpdateDocument")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: document_service_request.UpdateDocumentRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> document_service.UpdateDocumentResponse:
+            r"""Call the update document method over HTTP.
+
+            Args:
+                request (~.document_service_request.UpdateDocumentRequest):
+                    The request object. Request message for
+                DocumentService.UpdateDocument.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.document_service.UpdateDocumentResponse:
+                    Response message for
+                DocumentService.UpdateDocument.
+
+            """
+
+            http_options = _BaseDocumentServiceRestTransport._BaseUpdateDocument._get_http_options()
+            request, metadata = self._interceptor.pre_update_document(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseUpdateDocument._get_transcoded_request(http_options, request)
+
+            body = _BaseDocumentServiceRestTransport._BaseUpdateDocument._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseUpdateDocument._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._UpdateDocument._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = document_service.UpdateDocumentResponse()
+            pb_resp = document_service.UpdateDocumentResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_update_document(resp)
+            return resp
+
+    @property
+    def create_document(self) -> Callable[
+            [document_service_request.CreateDocumentRequest],
+            document_service.CreateDocumentResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._CreateDocument(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def delete_document(self) -> Callable[
+            [document_service_request.DeleteDocumentRequest],
+            empty_pb2.Empty]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def fetch_acl(self) -> Callable[
+            [document_service_request.FetchAclRequest],
+            document_service.FetchAclResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._FetchAcl(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_document(self) -> Callable[
+            [document_service_request.GetDocumentRequest],
+            gcc_document.Document]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._GetDocument(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def lock_document(self) -> Callable[
+            [document_service_request.LockDocumentRequest],
+            gcc_document.Document]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._LockDocument(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def search_documents(self) -> Callable[
+            [document_service_request.SearchDocumentsRequest],
+            document_service.SearchDocumentsResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._SearchDocuments(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def set_acl(self) -> Callable[
+            [document_service_request.SetAclRequest],
+            document_service.SetAclResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._SetAcl(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def update_document(self) -> Callable[
+            [document_service_request.UpdateDocumentRequest],
+            document_service.UpdateDocumentResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_operation(self):
+        return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
+
+    class _GetOperation(_BaseDocumentServiceRestTransport._BaseGetOperation, DocumentServiceRestStub):
+        def __hash__(self):
+            return hash("DocumentServiceRestTransport.GetOperation")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+            request: operations_pb2.GetOperationRequest, *,
+            retry: OptionalRetry=gapic_v1.method.DEFAULT,
+            timeout: Optional[float]=None,
+            metadata: Sequence[Tuple[str, str]]=(),
+            ) -> operations_pb2.Operation:
+
+            r"""Call the get operation method over HTTP.
+
+            Args:
+                request (operations_pb2.GetOperationRequest):
+                    The request object for GetOperation method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                operations_pb2.Operation: Response from GetOperation method.
+            """
+
+            http_options = _BaseDocumentServiceRestTransport._BaseGetOperation._get_http_options()
+            request, metadata = self._interceptor.pre_get_operation(request, metadata)
+            transcoded_request = _BaseDocumentServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDocumentServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = DocumentServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            content = response.content.decode("utf-8")
+            resp = operations_pb2.Operation()
+            resp = json_format.Parse(content, resp)
+            resp = self._interceptor.post_get_operation(resp)
+            return resp
+
+    @property
+    def kind(self) -> str:
+        return "rest"
+
+    def close(self):
+        self._session.close()
+
+
__all__ = (
    'DocumentServiceRestTransport',
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/rest_base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/rest_base.py
new file mode 100644
index 000000000000..c5ea30bcf18f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/document_service/transports/rest_base.py
@@ -0,0 +1,521 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json  # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import DocumentServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+class _BaseDocumentServiceRestTransport(DocumentServiceTransport):
+    """Base REST backend transport for DocumentService.
+
+    Note: This class is not meant to be used directly. Use its sync and
+    async sub-classes instead.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseCreateDocument:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*/locations/*}/documents',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_service_request.CreateDocumentRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentServiceRestTransport._BaseCreateDocument._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseDeleteDocument:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{name=projects/*/locations/*/documents/*}:delete',
+                'body': '*',
+            },
+        {
+                'method': 'post',
+                'uri': '/v1/{name=projects/*/locations/*/documents/referenceId/*}:delete',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_service_request.DeleteDocumentRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentServiceRestTransport._BaseDeleteDocument._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseFetchAcl:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{resource=projects/*/locations/*/documents/*}:fetchAcl',
+                'body': '*',
+            },
+        {
+                'method': 'post',
+                'uri': '/v1/{resource=projects/*}:fetchAcl',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_service_request.FetchAclRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentServiceRestTransport._BaseFetchAcl._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseGetDocument:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{name=projects/*/locations/*/documents/*}:get',
+                'body': '*',
+            },
+        {
+                'method': 'post',
+                'uri': '/v1/{name=projects/*/locations/*/documents/referenceId/*}:get',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_service_request.GetDocumentRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentServiceRestTransport._BaseGetDocument._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseLockDocument:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{name=projects/*/locations/*/documents/*}:lock',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_service_request.LockDocumentRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentServiceRestTransport._BaseLockDocument._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseSearchDocuments:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*/locations/*}/documents:search',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_service_request.SearchDocumentsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentServiceRestTransport._BaseSearchDocuments._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseSetAcl:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{resource=projects/*/locations/*/documents/*}:setAcl',
+                'body': '*',
+            },
+        {
+                'method': 'post',
+                'uri': '/v1/{resource=projects/*}:setAcl',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_service_request.SetAclRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentServiceRestTransport._BaseSetAcl._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseUpdateDocument:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'patch',
+                'uri': '/v1/{name=projects/*/locations/*/documents/*}',
+                'body': '*',
+            },
+        {
+                'method': 'patch',
+                'uri': '/v1/{name=projects/*/locations/*/documents/referenceId/*}',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = document_service_request.UpdateDocumentRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDocumentServiceRestTransport._BaseUpdateDocument._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseGetOperation:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            request_kwargs = json_format.MessageToDict(request)
+            transcoded_request = path_template.transcode(
+                http_options, **request_kwargs)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            return query_params
+
+
# Public surface of this module: only the base REST transport class.
__all__=(
    '_BaseDocumentServiceRestTransport',
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/__init__.py
new file mode 100644
index 000000000000..b9863e3b92b3
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import PipelineServiceClient
+from .async_client import PipelineServiceAsyncClient
+
# Public surface of the pipeline_service sub-package: the synchronous and
# asynchronous clients.
__all__ = (
    'PipelineServiceClient',
    'PipelineServiceAsyncClient',
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py
new file mode 100644
index 000000000000..2cee4ba20acf
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/async_client.py
@@ -0,0 +1,412 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.oauth2 import service_account              # type: ignore
+
+
try:
    # `gapic_v1.method._MethodDefault` is not present in every
    # google-api-core release; accessing it raises AttributeError there,
    # which is handled by the fallback alias below.
    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    # Fallback: use a plain `object` sentinel type when `_MethodDefault`
    # is unavailable.
    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.cloud.contentwarehouse_v1.types import pipeline_service
+from google.cloud.contentwarehouse_v1.types import pipelines
+from google.longrunning import operations_pb2 # type: ignore
+from .transports.base import PipelineServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import PipelineServiceGrpcAsyncIOTransport
+from .client import PipelineServiceClient
+
+
class PipelineServiceAsyncClient:
    """This service lets you manage pipelines."""

    # The async surface wraps a synchronous client instance; all
    # configuration, credentials, and transport wiring live on `_client`.
    _client: PipelineServiceClient

    # Copy defaults from the synchronous client for use here.
    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
    DEFAULT_ENDPOINT = PipelineServiceClient.DEFAULT_ENDPOINT
    DEFAULT_MTLS_ENDPOINT = PipelineServiceClient.DEFAULT_MTLS_ENDPOINT
    _DEFAULT_ENDPOINT_TEMPLATE = PipelineServiceClient._DEFAULT_ENDPOINT_TEMPLATE
    _DEFAULT_UNIVERSE = PipelineServiceClient._DEFAULT_UNIVERSE

    # Resource-path construction/parsing helpers are delegated to the sync
    # client so both clients share a single implementation.
    cloud_function_path = staticmethod(PipelineServiceClient.cloud_function_path)
    parse_cloud_function_path = staticmethod(PipelineServiceClient.parse_cloud_function_path)
    location_path = staticmethod(PipelineServiceClient.location_path)
    parse_location_path = staticmethod(PipelineServiceClient.parse_location_path)
    common_billing_account_path = staticmethod(PipelineServiceClient.common_billing_account_path)
    parse_common_billing_account_path = staticmethod(PipelineServiceClient.parse_common_billing_account_path)
    common_folder_path = staticmethod(PipelineServiceClient.common_folder_path)
    parse_common_folder_path = staticmethod(PipelineServiceClient.parse_common_folder_path)
    common_organization_path = staticmethod(PipelineServiceClient.common_organization_path)
    parse_common_organization_path = staticmethod(PipelineServiceClient.parse_common_organization_path)
    common_project_path = staticmethod(PipelineServiceClient.common_project_path)
    parse_common_project_path = staticmethod(PipelineServiceClient.parse_common_project_path)
    common_location_path = staticmethod(PipelineServiceClient.common_location_path)
    parse_common_location_path = staticmethod(PipelineServiceClient.parse_common_location_path)

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            PipelineServiceAsyncClient: The constructed client.
        """
        # Re-bind the sync classmethod to this class so it constructs an
        # async client.
        return PipelineServiceClient.from_service_account_info.__func__(PipelineServiceAsyncClient, info, *args, **kwargs)  # type: ignore

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            PipelineServiceAsyncClient: The constructed client.
        """
        # Re-bind the sync classmethod to this class so it constructs an
        # async client.
        return PipelineServiceClient.from_service_account_file.__func__(PipelineServiceAsyncClient, filename, *args, **kwargs)  # type: ignore

    # Alias kept for backward compatibility with older client surfaces.
    from_service_account_json = from_service_account_file

    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` if provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        return PipelineServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore

    @property
    def transport(self) -> PipelineServiceTransport:
        """Returns the transport used by the client instance.

        Returns:
            PipelineServiceTransport: The transport used by the client instance.
        """
        return self._client.transport

    @property
    def api_endpoint(self):
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._client._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used
                by the client instance.
        """
        return self._client._universe_domain

    # Transport class resolution is likewise delegated to the sync client.
    get_transport_class = PipelineServiceClient.get_transport_class

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, PipelineServiceTransport, Callable[..., PipelineServiceTransport]]] = "grpc_asyncio",
            client_options: Optional[ClientOptions] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the pipeline service async client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Optional[Union[str,PipelineServiceTransport,Callable[..., PipelineServiceTransport]]]):
                The transport to use, or a Callable that constructs and returns a new transport to use.
                If a Callable is given, it will be called with the same set of initialization
                arguments as used in the PipelineServiceTransport constructor.
                If set to None, a transport is chosen automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # All real initialization happens in the wrapped synchronous client.
        self._client = PipelineServiceClient(
            credentials=credentials,
            transport=transport,
            client_options=client_options,
            client_info=client_info,

        )

    async def run_pipeline(self,
            request: Optional[Union[pipeline_service.RunPipelineRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Run a predefined pipeline.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import contentwarehouse_v1

            async def sample_run_pipeline():
                # Create a client
                client = contentwarehouse_v1.PipelineServiceAsyncClient()

                # Initialize request argument(s)
                request = contentwarehouse_v1.RunPipelineRequest(
                    name="name_value",
                )

                # Make the request
                operation = client.run_pipeline(request=request)

                print("Waiting for operation to complete...")

                response = (await operation).result()

                # Handle the response
                print(response)

        Args:
            request (Optional[Union[google.cloud.contentwarehouse_v1.types.RunPipelineRequest, dict]]):
                The request object. Request message for
                DocumentService.RunPipeline.
            name (:class:`str`):
                Required. The resource name which owns the resources of
                the pipeline. Format:
                projects/{project_number}/locations/{location}.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.contentwarehouse_v1.types.RunPipelineResponse`
                Response message of RunPipeline method.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, pipeline_service.RunPipelineRequest):
            request = pipeline_service.RunPipelineRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[self._client._transport.run_pipeline]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            pipelines.RunPipelineResponse,
            metadata_type=pipelines.RunPipelineMetadata,
        )

        # Done; return the response.
        return response

    async def get_operation(
        self,
        request: Optional[operations_pb2.GetOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operations_pb2.Operation:
        r"""Gets the latest state of a long-running operation.

        Args:
            request (:class:`~.operations_pb2.GetOperationRequest`):
                The request object. Request message for
                `GetOperation` method.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
                    if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        Returns:
            ~.operations_pb2.Operation:
                An ``Operation`` object.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.GetOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self.transport._wrapped_methods[self._client._transport.get_operation]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request, retry=retry, timeout=timeout, metadata=metadata,)

        # Done; return the response.
        return response

    # Async context-manager support: entering returns the client, exiting
    # closes the underlying transport.
    async def __aenter__(self) -> "PipelineServiceAsyncClient":
        return self

    async def __aexit__(self, exc_type, exc, tb):
        await self.transport.close()
# Module-level default ClientInfo carrying this package's version; used as
# the default `client_info` argument of the client constructor above.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


# Public surface of this module: only the async client.
__all__ = (
    "PipelineServiceAsyncClient",
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py
new file mode 100644
index 000000000000..864ecaae2c16
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py
@@ -0,0 +1,779 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+import warnings
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials             # type: ignore
+from google.auth.transport import mtls                            # type: ignore
+from google.auth.transport.grpc import SslCredentials             # type: ignore
+from google.auth.exceptions import MutualTLSChannelError          # type: ignore
+from google.oauth2 import service_account                         # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.cloud.contentwarehouse_v1.types import pipeline_service
+from google.cloud.contentwarehouse_v1.types import pipelines
+from google.longrunning import operations_pb2 # type: ignore
+from .transports.base import PipelineServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import PipelineServiceGrpcTransport
+from .transports.grpc_asyncio import PipelineServiceGrpcAsyncIOTransport
+from .transports.rest import PipelineServiceRestTransport
+
+
+class PipelineServiceClientMeta(type):
+    """Metaclass for the PipelineService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    # Registry of supported transports, keyed by the label accepted by
+    # get_transport_class. Insertion order matters: the first entry
+    # ("grpc") is the default when no label is given.
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[PipelineServiceTransport]]
+    _transport_registry["grpc"] = PipelineServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = PipelineServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = PipelineServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[PipelineServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        # NOTE(review): an unknown label raises KeyError here.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PipelineServiceClient(metaclass=PipelineServiceClientMeta):
+    """This service lets you manage pipelines."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        # None / empty string: nothing to convert.
+        if not api_endpoint:
+            return api_endpoint
+
+        # Every group in the pattern is optional, so match() always succeeds;
+        # the captured groups tell us which endpoint pieces are present.
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        # Already an mTLS endpoint, or not a googleapis.com domain: leave as-is.
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "contentwarehouse.googleapis.com"
+    # __func__ unwraps the staticmethod so it can be called at class-body
+    # evaluation time.
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    # {UNIVERSE_DOMAIN} is filled in with the resolved universe domain
+    # (defaulting to _DEFAULT_UNIVERSE) when the endpoint is computed.
+    _DEFAULT_ENDPOINT_TEMPLATE = "contentwarehouse.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PipelineServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        # Any `credentials` kwarg the caller passed is overridden here.
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PipelineServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        # Any `credentials` kwarg the caller passed is overridden here.
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    # Backwards-compatibility alias for the historical method name.
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> PipelineServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PipelineServiceTransport: The transport used by the client
+                instance.
+        """
+        # Set exactly once in __init__ (either the user-supplied transport
+        # instance or a freshly constructed one); never reassigned.
+        return self._transport
+
+    # -- Resource path helpers: build / parse fully-qualified resource names. --
+
+    @staticmethod
+    def cloud_function_path(project: str,location: str,function: str,) -> str:
+        """Returns a fully-qualified cloud_function string."""
+        return "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, )
+
+    @staticmethod
+    def parse_cloud_function_path(path: str) -> Dict[str,str]:
+        """Parses a cloud_function path into its component segments.
+
+        Returns an empty dict (not None) when the path does not match.
+        """
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/functions/(?P<function>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def location_path(project: str,location: str,) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_location_path(path: str) -> Dict[str,str]:
+        """Parses a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    # -- Standard GAPIC "common resource" path helpers; each parse_* returns
+    # -- an empty dict (not None) when the path does not match.
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        # NOTE(review): unlike _read_environment_variables, these values are
+        # not lower-cased before validation, so e.g. "TRUE" is rejected here.
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        # Values are lower-cased so e.g. "TRUE" / "Auto" are accepted.
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        # No default: None means "not configured via the environment".
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        if use_cert_flag:
+            # Prefer the explicitly provided source; otherwise fall back to
+            # the machine's default client certificate, if one exists.
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+            universe_domain (str): The universe domain used by the client.
+            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+                Possible values are "always", "auto", or "never".
+
+        Returns:
+            str: The API endpoint to be used by the client.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mTLS is requested
+                outside the default ("googleapis.com") universe.
+        """
+        if api_override is not None:
+            api_endpoint = api_override
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            # mTLS is only supported in the default universe.
+            _default_universe = PipelineServiceClient._DEFAULT_UNIVERSE
+            if universe_domain != _default_universe:
+                raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
+            api_endpoint = PipelineServiceClient.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = PipelineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
+        return api_endpoint
+
+    @staticmethod
+    def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
+        """Return the universe domain used by the client.
+
+        Precedence: client option > environment variable > default
+        ("googleapis.com").
+
+        Args:
+            client_universe_domain (Optional[str]): The universe domain configured via the client options.
+            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+        Returns:
+            str: The universe domain to be used by the client.
+
+        Raises:
+            ValueError: If the universe domain is an empty string.
+        """
+        universe_domain = PipelineServiceClient._DEFAULT_UNIVERSE
+        if client_universe_domain is not None:
+            universe_domain = client_universe_domain
+        elif universe_domain_env is not None:
+            universe_domain = universe_domain_env
+        # Empty / whitespace-only values are rejected rather than silently
+        # falling back to the default.
+        if len(universe_domain.strip()) == 0:
+            raise ValueError("Universe Domain cannot be an empty string.")
+        return universe_domain
+
+    @staticmethod
+    def _compare_universes(client_universe: str,
+                           credentials: ga_credentials.Credentials) -> bool:
+        """Returns True iff the universe domains used by the client and credentials match.
+
+        Note: a mismatch raises ValueError — this method never returns False.
+
+        Args:
+            client_universe (str): The universe domain configured via the client options.
+            credentials (ga_credentials.Credentials): The credentials being used in the client.
+
+        Returns:
+            bool: True iff client_universe matches the universe in credentials.
+
+        Raises:
+            ValueError: when client_universe does not match the universe in credentials.
+        """
+
+        default_universe = PipelineServiceClient._DEFAULT_UNIVERSE
+        # Credentials that don't expose a universe_domain are treated as
+        # belonging to the default universe.
+        credentials_universe = getattr(credentials, "universe_domain", default_universe)
+
+        if client_universe != credentials_universe:
+            raise ValueError("The configured universe domain "
+                f"({client_universe}) does not match the universe domain "
+                f"found in the credentials ({credentials_universe}). "
+                "If you haven't configured the universe domain explicitly, "
+                f"`{default_universe}` is the default.")
+        return True
+
+    def _validate_universe_domain(self):
+        """Validates client's and credentials' universe domains are consistent.
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+        # Sticky memoization: once validation has succeeded, the short-circuit
+        # `or` skips re-comparing on every subsequent RPC.
+        self._is_universe_domain_valid = (self._is_universe_domain_valid or
+            PipelineServiceClient._compare_universes(self.universe_domain, self.transport._credentials))
+        return self._is_universe_domain_valid
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        # Resolved once during __init__ (transport host, explicit override,
+        # or the universe-domain template).
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        return self._universe_domain
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, PipelineServiceTransport, Callable[..., PipelineServiceTransport]]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the pipeline service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,PipelineServiceTransport,Callable[..., PipelineServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the PipelineServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        # Normalize client_options: accept a dict, None, or a ClientOptions
+        # instance; always end up with a ClientOptions object.
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        # getattr: older client_options versions may lack `universe_domain`.
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = PipelineServiceClient._read_environment_variables()
+        self._client_cert_source = PipelineServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = PipelineServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, PipelineServiceTransport)
+        if transport_provided:
+            # transport is a PipelineServiceTransport instance.
+            # A ready-made transport already carries its own credentials and
+            # scopes, so supplying them again is an error.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = cast(PipelineServiceTransport, transport)
+            self._api_endpoint = self._transport.host
+
+        # Endpoint precedence: transport host (above) > api_endpoint option >
+        # mTLS/universe-domain resolution.
+        self._api_endpoint = (self._api_endpoint or
+            PipelineServiceClient._get_api_endpoint(
+                self._client_options.api_endpoint,
+                self._client_cert_source,
+                self._universe_domain,
+                self._use_mtls_endpoint))
+
+        if not transport_provided:
+            import google.auth._default  # type: ignore
+
+            # hasattr guard: get_api_key_credentials only exists in newer
+            # google-auth releases.
+            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
+                credentials = google.auth._default.get_api_key_credentials(api_key_value)
+
+            transport_init: Union[Type[PipelineServiceTransport], Callable[..., PipelineServiceTransport]] = (
+                PipelineServiceClient.get_transport_class(transport)
+                if isinstance(transport, str) or transport is None
+                else cast(Callable[..., PipelineServiceTransport], transport)
+            )
+            # initialize with the provided callable or the passed in class
+            self._transport = transport_init(
+                credentials=credentials,
+                credentials_file=self._client_options.credentials_file,
+                host=self._api_endpoint,
+                scopes=self._client_options.scopes,
+                client_cert_source_for_mtls=self._client_cert_source,
+                quota_project_id=self._client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=True,
+                api_audience=self._client_options.api_audience,
+            )
+
+    def run_pipeline(self,
+            request: Optional[Union[pipeline_service.RunPipelineRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> operation.Operation:
+        r"""Run a predefined pipeline.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_run_pipeline():
+                # Create a client
+                client = contentwarehouse_v1.PipelineServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.RunPipelineRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.run_pipeline(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.RunPipelineRequest, dict]):
+                The request object. Request message for
+                DocumentService.RunPipeline.
+            name (str):
+                Required. The resource name which owns the resources of
+                the pipeline. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.contentwarehouse_v1.types.RunPipelineResponse`
+                Response message of RunPipeline method.
+
+        Raises:
+            ValueError: If both ``request`` and ``name`` are set.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        # A dict `request` is also coerced here: the proto-plus constructor
+        # accepts a mapping.
+        if not isinstance(request, pipeline_service.RunPipelineRequest):
+            request = pipeline_service.RunPipelineRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.run_pipeline]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            pipelines.RunPipelineResponse,
+            metadata_type=pipelines.RunPipelineMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "PipelineServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+        """
+        self.transport.close()
+
+    def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+
+
+
+
+
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "PipelineServiceClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/README.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/README.rst
new file mode 100644
index 000000000000..b5f17d1cea76
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`PipelineServiceTransport` is the ABC for all transports.
+- public child `PipelineServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `PipelineServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BasePipelineServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `PipelineServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/__init__.py
new file mode 100644
index 000000000000..17cccf11a4c8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/__init__.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import PipelineServiceTransport
+from .grpc import PipelineServiceGrpcTransport
+from .grpc_asyncio import PipelineServiceGrpcAsyncIOTransport
+from .rest import PipelineServiceRestTransport
+from .rest import PipelineServiceRestInterceptor
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[PipelineServiceTransport]]
+_transport_registry['grpc'] = PipelineServiceGrpcTransport
+_transport_registry['grpc_asyncio'] = PipelineServiceGrpcAsyncIOTransport
+_transport_registry['rest'] = PipelineServiceRestTransport
+
+__all__ = (
+    'PipelineServiceTransport',
+    'PipelineServiceGrpcTransport',
+    'PipelineServiceGrpcAsyncIOTransport',
+    'PipelineServiceRestTransport',
+    'PipelineServiceRestInterceptor',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/base.py
new file mode 100644
index 000000000000..67925221a675
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/base.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import pipeline_service
+from google.longrunning import operations_pb2 # type: ignore
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class PipelineServiceTransport(abc.ABC):
+    """Abstract transport class for PipelineService."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'contentwarehouse.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply audience if the credentials file passed from user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.run_pipeline: gapic_v1.method.wrap_method(
+                self.run_pipeline,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=120.0,
+                ),
+                default_timeout=120.0,
+                client_info=client_info,
+            ),
+            self.get_operation: gapic_v1.method.wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+         }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    @property
+    def operations_client(self):
+        """Return the client designed to process long-running operations."""
+        raise NotImplementedError()
+
+    @property
+    def run_pipeline(self) -> Callable[
+            [pipeline_service.RunPipelineRequest],
+            Union[
+                operations_pb2.Operation,
+                Awaitable[operations_pb2.Operation]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[
+        [operations_pb2.GetOperationRequest],
+        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'PipelineServiceTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc.py
new file mode 100644
index 000000000000..bc3372b489a3
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc.py
@@ -0,0 +1,307 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
+import google.auth                         # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import pipeline_service
+from google.longrunning import operations_pb2 # type: ignore
+from .base import PipelineServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class PipelineServiceGrpcTransport(PipelineServiceTransport):
+    """gRPC backend transport for PipelineService.
+
+    This service lets you manage pipelines.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, grpc.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(
+                self.grpc_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def run_pipeline(self) -> Callable[
+            [pipeline_service.RunPipelineRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the run pipeline method over gRPC.
+
+        Run a predefined pipeline.
+
+        Returns:
+            Callable[[~.RunPipelineRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'run_pipeline' not in self._stubs:
+            self._stubs['run_pipeline'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.PipelineService/RunPipeline',
+                request_serializer=pipeline_service.RunPipelineRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['run_pipeline']
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'PipelineServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..6c4aee795646
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/grpc_asyncio.py
@@ -0,0 +1,343 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc                        # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import pipeline_service
+from google.longrunning import operations_pb2 # type: ignore
+from .base import PipelineServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import PipelineServiceGrpcTransport
+
+
+class PipelineServiceGrpcAsyncIOTransport(PipelineServiceTransport):
+    """gRPC AsyncIO backend transport for PipelineService.
+
+    This service lets you manage pipelines.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    # The (possibly lazily-created) aio channel all stubs share.
+    _grpc_channel: aio.Channel
+    # Cache of RPC-name -> stub callable; populated lazily by the properties below.
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            api_audience (Optional[str]): Optional audience for self-signed
+                JWTs; forwarded unchanged to the base transport.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        # (feature-detect whether the installed google-api-core accepts a
+        # ``kind`` argument in wrap_method; see _wrap_method below).
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsAsyncClient(
+                self.grpc_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def run_pipeline(self) -> Callable[
+            [pipeline_service.RunPipelineRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the run pipeline method over gRPC.
+
+        Run a predefined pipeline.
+
+        Returns:
+            Callable[[~.RunPipelineRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'run_pipeline' not in self._stubs:
+            self._stubs['run_pipeline'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.PipelineService/RunPipeline',
+                request_serializer=pipeline_service.RunPipelineRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['run_pipeline']
+
+    def _prep_wrapped_messages(self, client_info):
+        """ Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+        self._wrapped_methods = {
+            self.run_pipeline: self._wrap_method(
+                self.run_pipeline,
+                default_retry=retries.AsyncRetry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=120.0,
+                ),
+                default_timeout=120.0,
+                client_info=client_info,
+            ),
+            self.get_operation: self._wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def _wrap_method(self, func, *args, **kwargs):
+        # Pass ``kind`` only when the installed wrap_method supports it
+        # (detected once in __init__).
+        if self._wrap_with_kind:  # pragma: NO COVER
+            kwargs["kind"] = self.kind
+        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
+    def close(self):
+        """Close the underlying gRPC channel."""
+        return self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        # Transport-kind identifier for this asyncio gRPC transport.
+        return "grpc_asyncio"
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+
+        Returns:
+            Callable[[~.GetOperationRequest], ~.Operation]:
+                A function that, when called, will call the underlying
+                ``google.longrunning.Operations/GetOperation`` RPC on the
+                server. The stub is created lazily and cached in
+                ``self._stubs``.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+
+# Public surface of this module: only the transport class itself.
+__all__ = (
+    'PipelineServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py
new file mode 100644
index 000000000000..f90c2c67bf59
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py
@@ -0,0 +1,405 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+import json  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from google.api_core import operations_v1
+
+from requests import __version__ as requests_version
+import dataclasses
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+
+from google.cloud.contentwarehouse_v1.types import pipeline_service
+from google.longrunning import operations_pb2  # type: ignore
+
+
+from .rest_base import _BasePipelineServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+# Retry-type alias: prefer the precise _MethodDefault sentinel type; fall
+# back to ``object`` on older google-api-core releases that lack it.
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+
+# Default client metadata for this REST transport; reports the installed
+# ``requests`` version instead of a gRPC version.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=f"requests@{requests_version}",
+)
+
+
+class PipelineServiceRestInterceptor:
+    """Interceptor for PipelineService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the PipelineServiceRestTransport.
+
+    Every default implementation below is an identity pass-through; override
+    only the hooks you need.
+
+    .. code-block:: python
+        class MyCustomPipelineServiceInterceptor(PipelineServiceRestInterceptor):
+            def pre_run_pipeline(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_run_pipeline(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = PipelineServiceRestTransport(interceptor=MyCustomPipelineServiceInterceptor())
+        client = PipelineServiceClient(transport=transport)
+
+
+    """
+    def pre_run_pipeline(self, request: pipeline_service.RunPipelineRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[pipeline_service.RunPipelineRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for run_pipeline
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the PipelineService server.
+        """
+        return request, metadata
+
+    def post_run_pipeline(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
+        """Post-rpc interceptor for run_pipeline
+
+        Override in a subclass to manipulate the response
+        after it is returned by the PipelineService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_get_operation(
+        self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the PipelineService server.
+        """
+        return request, metadata
+
+    def post_get_operation(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the PipelineService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class PipelineServiceRestStub:
+    """Shared state for per-RPC REST stub classes: the authorized HTTP
+    session, the target host, and the interceptor to run around each call."""
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: PipelineServiceRestInterceptor
+
+
+class PipelineServiceRestTransport(_BasePipelineServiceRestTransport):
+    """REST backend synchronous transport for PipelineService.
+
+    This service lets you manage pipelines.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[PipelineServiceRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+            interceptor (Optional[PipelineServiceRestInterceptor]): Hooks run
+                before and after each RPC; defaults to the no-op base
+                interceptor when not provided.
+            api_audience (Optional[str]): Optional audience for self-signed
+                JWTs; forwarded unchanged to the base transport.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or PipelineServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            # HTTP route for polling long-running operations over REST.
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                'google.longrunning.Operations.GetOperation': [
+                    {
+                        'method': 'get',
+                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                    host=self._host,
+                    # use the credentials which are saved
+                    credentials=self._credentials,
+                    scopes=self._scopes,
+                    http_options=http_options,
+                    path_prefix="v1")
+
+            self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport)
+
+        # Return the client from cache.
+        return self._operations_client
+
+    class _RunPipeline(_BasePipelineServiceRestTransport._BaseRunPipeline, PipelineServiceRestStub):
+        def __hash__(self):
+            return hash("PipelineServiceRestTransport.RunPipeline")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Issue the transcoded HTTP request (method/uri/body derived from
+            # the proto request) via the authorized session.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: pipeline_service.RunPipelineRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> operations_pb2.Operation:
+            r"""Call the run pipeline method over HTTP.
+
+            Args:
+                request (~.pipeline_service.RunPipelineRequest):
+                    The request object. Request message for
+                DocumentService.RunPipeline.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                long-running operation that is the
+                result of a network API call.
+
+            """
+
+            http_options = _BasePipelineServiceRestTransport._BaseRunPipeline._get_http_options()
+            request, metadata = self._interceptor.pre_run_pipeline(request, metadata)
+            transcoded_request = _BasePipelineServiceRestTransport._BaseRunPipeline._get_transcoded_request(http_options, request)
+
+            body = _BasePipelineServiceRestTransport._BaseRunPipeline._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BasePipelineServiceRestTransport._BaseRunPipeline._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = PipelineServiceRestTransport._RunPipeline._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            # (parsed leniently: unknown JSON fields are ignored so newer
+            # server responses do not break older clients).
+            resp = operations_pb2.Operation()
+            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_run_pipeline(resp)
+            return resp
+
+    @property
+    def run_pipeline(self) -> Callable[
+            [pipeline_service.RunPipelineRequest],
+            operations_pb2.Operation]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._RunPipeline(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_operation(self):
+        # Callable stub for google.longrunning.Operations/GetOperation.
+        return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
+
+    class _GetOperation(_BasePipelineServiceRestTransport._BaseGetOperation, PipelineServiceRestStub):
+        def __hash__(self):
+            return hash("PipelineServiceRestTransport.GetOperation")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Issue the transcoded HTTP request (GetOperation carries no body).
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+            request: operations_pb2.GetOperationRequest, *,
+            retry: OptionalRetry=gapic_v1.method.DEFAULT,
+            timeout: Optional[float]=None,
+            metadata: Sequence[Tuple[str, str]]=(),
+            ) -> operations_pb2.Operation:
+
+            r"""Call the get operation method over HTTP.
+
+            Args:
+                request (operations_pb2.GetOperationRequest):
+                    The request object for GetOperation method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                operations_pb2.Operation: Response from GetOperation method.
+            """
+
+            http_options = _BasePipelineServiceRestTransport._BaseGetOperation._get_http_options()
+            request, metadata = self._interceptor.pre_get_operation(request, metadata)
+            transcoded_request = _BasePipelineServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BasePipelineServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = PipelineServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            content = response.content.decode("utf-8")
+            resp = operations_pb2.Operation()
+            resp = json_format.Parse(content, resp)
+            resp = self._interceptor.post_get_operation(resp)
+            return resp
+
+    @property
+    def kind(self) -> str:
+        # Transport-kind identifier for this synchronous REST transport.
+        return "rest"
+
+    def close(self):
+        """Close the underlying authorized HTTP session."""
+        self._session.close()
+
+
+# Public surface of this module: only the transport class itself.
+__all__=(
+    'PipelineServiceRestTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest_base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest_base.py
new file mode 100644
index 000000000000..263aef114042
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest_base.py
@@ -0,0 +1,164 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json  # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import PipelineServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.contentwarehouse_v1.types import pipeline_service
+from google.longrunning import operations_pb2  # type: ignore
+
+
+class _BasePipelineServiceRestTransport(PipelineServiceTransport):
+    """Base REST backend transport for PipelineService.
+
+    Note: This class is not meant to be used directly. Use its sync and
+    async sub-classes instead.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)  # Split an optional scheme off the host.
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host  # Prepend url_scheme only when host carried no scheme.
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseRunPipeline:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")  # NOTE(review): returns (not raises) — generator artifact; never invoked.
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}  # Defaults for required params absent from the request.
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{name=projects/*/locations/*}:runPipeline',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = pipeline_service.RunPipelineRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BasePipelineServiceRestTransport._BaseRunPipeline._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"  # Ask the server for JSON responses with integer-encoded enums.
+            return query_params
+
+    class _BaseGetOperation:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")  # NOTE(review): returns (not raises) — generator artifact; never invoked.
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            request_kwargs = json_format.MessageToDict(request)  # Mixin op: transcode from a dict, not a typed pb request.
+            transcoded_request = path_template.transcode(
+                http_options, **request_kwargs)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            return query_params
+
+
+__all__=(
+    '_BasePipelineServiceRestTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/__init__.py
new file mode 100644
index 000000000000..f74f7a74d37d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import RuleSetServiceClient
+from .async_client import RuleSetServiceAsyncClient
+
+__all__ = (
+    'RuleSetServiceClient',
+    'RuleSetServiceAsyncClient',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py
new file mode 100644
index 000000000000..f00aebb6c669
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/async_client.py
@@ -0,0 +1,831 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.oauth2 import service_account              # type: ignore
+
+
+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.rule_set_service import pagers
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from .transports.base import RuleSetServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import RuleSetServiceGrpcAsyncIOTransport
+from .client import RuleSetServiceClient
+
+
+class RuleSetServiceAsyncClient:
+    """Service to manage customer specific RuleSets."""
+
+    _client: RuleSetServiceClient
+
+    # Copy defaults from the synchronous client for use here.
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = RuleSetServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = RuleSetServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = RuleSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = RuleSetServiceClient._DEFAULT_UNIVERSE
+
+    document_path = staticmethod(RuleSetServiceClient.document_path)
+    parse_document_path = staticmethod(RuleSetServiceClient.parse_document_path)
+    location_path = staticmethod(RuleSetServiceClient.location_path)
+    parse_location_path = staticmethod(RuleSetServiceClient.parse_location_path)
+    rule_set_path = staticmethod(RuleSetServiceClient.rule_set_path)
+    parse_rule_set_path = staticmethod(RuleSetServiceClient.parse_rule_set_path)
+    common_billing_account_path = staticmethod(RuleSetServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(RuleSetServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(RuleSetServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(RuleSetServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(RuleSetServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(RuleSetServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(RuleSetServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(RuleSetServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(RuleSetServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(RuleSetServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            RuleSetServiceAsyncClient: The constructed client.
+        """
+        return RuleSetServiceClient.from_service_account_info.__func__(RuleSetServiceAsyncClient, info, *args, **kwargs)  # type: ignore  # reuse the sync classmethod, rebound to the async class
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            RuleSetServiceAsyncClient: The constructed client.
+        """
+        return RuleSetServiceClient.from_service_account_file.__func__(RuleSetServiceAsyncClient, filename, *args, **kwargs)  # type: ignore  # reuse the sync classmethod, rebound to the async class
+
+    from_service_account_json = from_service_account_file  # Backwards-compatible alias.
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return RuleSetServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore  # delegate to the sync client implementation
+
+    @property
+    def transport(self) -> RuleSetServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            RuleSetServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport  # All RPCs are delegated to the wrapped sync client's transport.
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint  # Resolved by the wrapped sync client during construction.
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain  # Resolved by the wrapped sync client during construction.
+
+    get_transport_class = RuleSetServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, RuleSetServiceTransport, Callable[..., RuleSetServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the rule set service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,RuleSetServiceTransport,Callable[..., RuleSetServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the RuleSetServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = RuleSetServiceClient(  # Async client is a thin wrapper; all configuration/validation is delegated.
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+    async def create_rule_set(self,
+            request: Optional[Union[ruleset_service_request.CreateRuleSetRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            rule_set: Optional[rule_engine.RuleSet] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> rule_engine.RuleSet:
+        r"""Creates a ruleset.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_create_rule_set():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.CreateRuleSetRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = await client.create_rule_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.CreateRuleSetRequest, dict]]):
+                The request object. Request message for
+                RuleSetService.CreateRuleSet.
+            parent (:class:`str`):
+                Required. The parent name. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            rule_set (:class:`google.cloud.contentwarehouse_v1.types.RuleSet`):
+                Required. The rule set to create.
+                This corresponds to the ``rule_set`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.RuleSet:
+                Represents a set of rules from a
+                single customer.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, rule_set])  # True when any flattened field kwarg was supplied.
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (flattened kwargs were rejected
+        #   above, so there is no risk of modifying the input), or create one.
+        if not isinstance(request, ruleset_service_request.CreateRuleSetRequest):
+            request = ruleset_service_request.CreateRuleSetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if rule_set is not None:
+            request.rule_set = rule_set
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_rule_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_rule_set(self,
+            request: Optional[Union[ruleset_service_request.GetRuleSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> rule_engine.RuleSet:
+        r"""Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_get_rule_set():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.GetRuleSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.get_rule_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.GetRuleSetRequest, dict]]):
+                The request object. Request message for
+                RuleSetService.GetRuleSet.
+            name (:class:`str`):
+                Required. The name of the rule set to retrieve. Format:
+                projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.RuleSet:
+                Represents a set of rules from a
+                single customer.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])  # True when the flattened ``name`` kwarg was supplied.
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (flattened kwargs were rejected
+        #   above, so there is no risk of modifying the input), or create one.
+        if not isinstance(request, ruleset_service_request.GetRuleSetRequest):
+            request = ruleset_service_request.GetRuleSetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_rule_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_rule_set(self,
+            request: Optional[Union[ruleset_service_request.UpdateRuleSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            rule_set: Optional[rule_engine.RuleSet] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> rule_engine.RuleSet:
+        r"""Updates a ruleset. Returns INVALID_ARGUMENT if the name of the
+        ruleset is non-empty and does not equal the existing name.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_update_rule_set():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.UpdateRuleSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.update_rule_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.UpdateRuleSetRequest, dict]]):
+                The request object. Request message for
+                RuleSetService.UpdateRuleSet.
+            name (:class:`str`):
+                Required. The name of the rule set to update. Format:
+                projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            rule_set (:class:`google.cloud.contentwarehouse_v1.types.RuleSet`):
+                Required. The rule set to update.
+                This corresponds to the ``rule_set`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.RuleSet:
+                Represents a set of rules from a
+                single customer.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, rule_set])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, ruleset_service_request.UpdateRuleSetRequest):
+            request = ruleset_service_request.UpdateRuleSetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+        if rule_set is not None:
+            request.rule_set = rule_set
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.update_rule_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_rule_set(self,
+            request: Optional[Union[ruleset_service_request.DeleteRuleSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a ruleset. Returns NOT_FOUND if the document does not
+        exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_delete_rule_set():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteRuleSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.delete_rule_set(request=request)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.DeleteRuleSetRequest, dict]]):
+                The request object. Request message for
+                RuleSetService.DeleteRuleSet.
+            name (:class:`str`):
+                Required. The name of the rule set to delete. Format:
+                projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Raises:
+            ValueError: If both ``request`` and the flattened ``name``
+                argument are provided.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Coerce a dict (or None) into a proper DeleteRuleSetRequest; the
+        #   flattened ``name`` keyword, if provided, is applied to it below.
+        if not isinstance(request, ruleset_service_request.DeleteRuleSetRequest):
+            request = ruleset_service_request.DeleteRuleSetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_rule_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.  Delete returns Empty, so there is no response
+        # to hand back to the caller.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def list_rule_sets(self,
+            request: Optional[Union[ruleset_service_request.ListRuleSetsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListRuleSetsAsyncPager:
+        r"""Lists rulesets.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_list_rule_sets():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListRuleSetsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_rule_sets(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.ListRuleSetsRequest, dict]]):
+                The request object. Request message for
+                RuleSetService.ListRuleSets.
+            parent (:class:`str`):
+                Required. The parent, which owns this collection of
+                document. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.rule_set_service.pagers.ListRuleSetsAsyncPager:
+                Response message for
+                RuleSetService.ListRuleSets.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        Raises:
+            ValueError: If both ``request`` and the flattened ``parent``
+                argument are provided.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Coerce a dict (or None) into a proper ListRuleSetsRequest; the
+        #   flattened ``parent`` keyword, if provided, is applied to it below.
+        if not isinstance(request, ruleset_service_request.ListRuleSetsRequest):
+            request = ruleset_service_request.ListRuleSetsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_rule_sets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.  The same retry/timeout/metadata
+        # settings are reused for the follow-up page requests.
+        response = pagers.ListRuleSetsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        # NOTE(review): the service methods above index
+        # ``self._client._transport._wrapped_methods`` directly; confirm that
+        # ``self.transport`` resolves to the same transport object here.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def __aenter__(self) -> "RuleSetServiceAsyncClient":
+        """Enter the async context manager; returns the client itself."""
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        """Exit the async context manager, closing the underlying transport."""
+        await self.transport.close()
+
+# Client info attached to outgoing requests; reports this GAPIC package's
+# version (taken from ``gapic_version.py``).
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "RuleSetServiceAsyncClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py
new file mode 100644
index 000000000000..4f4cfbf093ca
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py
@@ -0,0 +1,1203 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+import warnings
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials             # type: ignore
+from google.auth.transport import mtls                            # type: ignore
+from google.auth.transport.grpc import SslCredentials             # type: ignore
+from google.auth.exceptions import MutualTLSChannelError          # type: ignore
+from google.oauth2 import service_account                         # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.rule_set_service import pagers
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from .transports.base import RuleSetServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import RuleSetServiceGrpcTransport
+from .transports.grpc_asyncio import RuleSetServiceGrpcAsyncIOTransport
+from .transports.rest import RuleSetServiceRestTransport
+
+
+class RuleSetServiceClientMeta(type):
+    """Metaclass for the RuleSetService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    # Registry of available transports keyed by label; insertion order
+    # matters, because the first entry is the default transport.
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[RuleSetServiceTransport]]
+    _transport_registry["grpc"] = RuleSetServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = RuleSetServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = RuleSetServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[RuleSetServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+
+        Raises:
+            KeyError: If ``label`` is not a registered transport name.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class RuleSetServiceClient(metaclass=RuleSetServiceClientMeta):
+    """Service to manage customer specific RuleSets."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        # Falsy (None or empty) endpoints are returned unchanged.
+        if not api_endpoint:
+            return api_endpoint
+
+        # Every group except ``name`` is optional, so any endpoint starting
+        # with a non-dot character matches.
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        # Already an mTLS endpoint, or not a *.googleapis.com domain:
+        # return unchanged.
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "contentwarehouse.googleapis.com"
+    # ``__func__`` unwraps the staticmethod so it can be called while the
+    # class body is still being evaluated.
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    # Endpoint template instantiated with the resolved universe domain.
+    _DEFAULT_ENDPOINT_TEMPLATE = "contentwarehouse.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            RuleSetServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        # Overrides any ``credentials`` entry already present in kwargs.
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            RuleSetServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        # Overrides any ``credentials`` entry already present in kwargs.
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> RuleSetServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            RuleSetServiceTransport: The transport used by the client
+                instance.
+        """
+        # NOTE(review): ``_transport`` is assigned in ``__init__`` (outside
+        # this excerpt); accessing this property before construction
+        # completes would raise AttributeError.
+        return self._transport
+
+    @staticmethod
+    def document_path(project: str,location: str,document: str,) -> str:
+        """Returns a fully-qualified document string (``projects/*/locations/*/documents/*``)."""
+        return "projects/{project}/locations/{location}/documents/{document}".format(project=project, location=location, document=document, )
+
+    @staticmethod
+    def parse_document_path(path: str) -> Dict[str,str]:
+        """Parses a document path into its component segments (empty dict if ``path`` does not match)."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/documents/(?P<document>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def location_path(project: str,location: str,) -> str:
+        """Returns a fully-qualified location string (``projects/*/locations/*``)."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_location_path(path: str) -> Dict[str,str]:
+        """Parses a location path into its component segments (empty dict if ``path`` does not match)."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def rule_set_path(project: str,location: str,rule_set: str,) -> str:
+        """Returns a fully-qualified rule_set string (``projects/*/locations/*/ruleSets/*``)."""
+        return "projects/{project}/locations/{location}/ruleSets/{rule_set}".format(project=project, location=location, rule_set=rule_set, )
+
+    @staticmethod
+    def parse_rule_set_path(path: str) -> Dict[str,str]:
+        """Parses a rule_set path into its component segments (empty dict if ``path`` does not match)."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/ruleSets/(?P<rule_set>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string (``billingAccounts/*``)."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments (empty dict if ``path`` does not match)."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string (``folders/*``)."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments (empty dict if ``path`` does not match)."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string (``organizations/*``)."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments (empty dict if ``path`` does not match)."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string (``projects/*``)."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments (empty dict if ``path`` does not match)."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string (``projects/*/locations/*``)."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments (empty dict if ``path`` does not match)."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        # NOTE(review): unlike ``_read_environment_variables``, the values are
+        # not lower-cased here, so e.g. "True" would be rejected — confirm
+        # this matches upstream generator output.
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use: explicit option first,
+        # then the platform default source, if any.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        # Values are lower-cased so "True"/"AUTO" etc. are accepted.
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        # None when the variable is unset; resolved later by _get_universe_domain.
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        # An explicitly provided source takes precedence over the platform's
+        # default client cert source.
+        if use_cert_flag:
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+            universe_domain (str): The universe domain used by the client.
+            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+                Possible values are "always", "auto", or "never".
+
+        Returns:
+            str: The API endpoint to be used by the client.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mTLS is requested
+                outside the default (googleapis.com) universe.
+        """
+        if api_override is not None:
+            api_endpoint = api_override
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            # mTLS endpoints only exist in the default universe.
+            _default_universe = RuleSetServiceClient._DEFAULT_UNIVERSE
+            if universe_domain != _default_universe:
+                raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
+            api_endpoint = RuleSetServiceClient.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = RuleSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
+        return api_endpoint
+
+    @staticmethod
+    def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
+        """Return the universe domain used by the client.
+
+        Precedence: client option > environment variable > default
+        ("googleapis.com").
+
+        Args:
+            client_universe_domain (Optional[str]): The universe domain configured via the client options.
+            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+        Returns:
+            str: The universe domain to be used by the client.
+
+        Raises:
+            ValueError: If the universe domain is an empty string.
+        """
+        universe_domain = RuleSetServiceClient._DEFAULT_UNIVERSE
+        if client_universe_domain is not None:
+            universe_domain = client_universe_domain
+        elif universe_domain_env is not None:
+            universe_domain = universe_domain_env
+        # Empty or whitespace-only values are rejected rather than silently
+        # falling back to the default.
+        if len(universe_domain.strip()) == 0:
+            raise ValueError("Universe Domain cannot be an empty string.")
+        return universe_domain
+
+    @staticmethod
+    def _compare_universes(client_universe: str,
+                           credentials: ga_credentials.Credentials) -> bool:
+        """Returns True iff the universe domains used by the client and credentials match.
+
+        Args:
+            client_universe (str): The universe domain configured via the client options.
+            credentials (ga_credentials.Credentials): The credentials being used in the client.
+
+        Returns:
+            bool: True iff client_universe matches the universe in credentials.
+
+        Raises:
+            ValueError: when client_universe does not match the universe in credentials.
+        """
+
+        default_universe = RuleSetServiceClient._DEFAULT_UNIVERSE
+        credentials_universe = getattr(credentials, "universe_domain", default_universe)
+
+        if client_universe != credentials_universe:
+            raise ValueError("The configured universe domain "
+                f"({client_universe}) does not match the universe domain "
+                f"found in the credentials ({credentials_universe}). "
+                "If you haven't configured the universe domain explicitly, "
+                f"`{default_universe}` is the default.")
+        return True
+
+    def _validate_universe_domain(self):
+        """Validates client's and credentials' universe domains are consistent.
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+        self._is_universe_domain_valid = (self._is_universe_domain_valid or
+            RuleSetServiceClient._compare_universes(self.universe_domain, self.transport._credentials))
+        return self._is_universe_domain_valid
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        return self._universe_domain
+
    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, RuleSetServiceTransport, Callable[..., RuleSetServiceTransport]]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the rule set service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Optional[Union[str,RuleSetServiceTransport,Callable[..., RuleSetServiceTransport]]]):
                The transport to use, or a Callable that constructs and returns a new transport.
                If a Callable is given, it will be called with the same set of initialization
                arguments as used in the RuleSetServiceTransport constructor.
                If set to None, a transport is chosen automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which has one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that the ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize ``client_options``: accept a plain dict, an explicit
        # ClientOptions instance, or None (use defaults).
        self._client_options = client_options
        if isinstance(self._client_options, dict):
            self._client_options = client_options_lib.from_dict(self._client_options)
        if self._client_options is None:
            self._client_options = client_options_lib.ClientOptions()
        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)

        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)

        # Resolve mTLS and universe-domain configuration from environment
        # variables and the (normalized) client options.
        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = RuleSetServiceClient._read_environment_variables()
        self._client_cert_source = RuleSetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
        self._universe_domain = RuleSetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
        self._api_endpoint = None # updated below, depending on `transport`

        # Initialize the universe domain validation.
        self._is_universe_domain_valid = False

        # An API key and explicit credentials cannot both be honored.
        api_key_value = getattr(self._client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        transport_provided = isinstance(transport, RuleSetServiceTransport)
        if transport_provided:
            # transport is a RuleSetServiceTransport instance.
            if credentials or self._client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if self._client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = cast(RuleSetServiceTransport, transport)
            self._api_endpoint = self._transport.host

        # A pre-built transport fixes the endpoint (its host); otherwise the
        # endpoint is derived from the options/mTLS/universe settings above.
        self._api_endpoint = (self._api_endpoint or
            RuleSetServiceClient._get_api_endpoint(
                self._client_options.api_endpoint,
                self._client_cert_source,
                self._universe_domain,
                self._use_mtls_endpoint))

        if not transport_provided:
            import google.auth._default  # type: ignore

            # Exchange an API key for credentials when the installed
            # google-auth version supports it.
            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            transport_init: Union[Type[RuleSetServiceTransport], Callable[..., RuleSetServiceTransport]] = (
                RuleSetServiceClient.get_transport_class(transport)
                if isinstance(transport, str) or transport is None
                else cast(Callable[..., RuleSetServiceTransport], transport)
            )
            # initialize with the provided callable or the passed in class
            self._transport = transport_init(
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )
+
+    def create_rule_set(self,
+            request: Optional[Union[ruleset_service_request.CreateRuleSetRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            rule_set: Optional[rule_engine.RuleSet] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> rule_engine.RuleSet:
+        r"""Creates a ruleset.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_create_rule_set():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.CreateRuleSetRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = client.create_rule_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.CreateRuleSetRequest, dict]):
+                The request object. Request message for
+                RuleSetService.CreateRuleSet.
+            parent (str):
+                Required. The parent name. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            rule_set (google.cloud.contentwarehouse_v1.types.RuleSet):
+                Required. The rule set to create.
+                This corresponds to the ``rule_set`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.RuleSet:
+                Represents a set of rules from a
+                single customer.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, rule_set])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, ruleset_service_request.CreateRuleSetRequest):
+            request = ruleset_service_request.CreateRuleSetRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if rule_set is not None:
+                request.rule_set = rule_set
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_rule_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_rule_set(self,
+            request: Optional[Union[ruleset_service_request.GetRuleSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> rule_engine.RuleSet:
+        r"""Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_get_rule_set():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.GetRuleSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_rule_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.GetRuleSetRequest, dict]):
+                The request object. Request message for
+                RuleSetService.GetRuleSet.
+            name (str):
+                Required. The name of the rule set to retrieve. Format:
+                projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.RuleSet:
+                Represents a set of rules from a
+                single customer.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, ruleset_service_request.GetRuleSetRequest):
+            request = ruleset_service_request.GetRuleSetRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_rule_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_rule_set(self,
+            request: Optional[Union[ruleset_service_request.UpdateRuleSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            rule_set: Optional[rule_engine.RuleSet] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> rule_engine.RuleSet:
+        r"""Updates a ruleset. Returns INVALID_ARGUMENT if the name of the
+        ruleset is non-empty and does not equal the existing name.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_update_rule_set():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.UpdateRuleSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.update_rule_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.UpdateRuleSetRequest, dict]):
+                The request object. Request message for
+                RuleSetService.UpdateRuleSet.
+            name (str):
+                Required. The name of the rule set to update. Format:
+                projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            rule_set (google.cloud.contentwarehouse_v1.types.RuleSet):
+                Required. The rule set to update.
+                This corresponds to the ``rule_set`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.RuleSet:
+                Represents a set of rules from a
+                single customer.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, rule_set])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, ruleset_service_request.UpdateRuleSetRequest):
+            request = ruleset_service_request.UpdateRuleSetRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+            if rule_set is not None:
+                request.rule_set = rule_set
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_rule_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_rule_set(self,
+            request: Optional[Union[ruleset_service_request.DeleteRuleSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a ruleset. Returns NOT_FOUND if the document does not
+        exist.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_delete_rule_set():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteRuleSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_rule_set(request=request)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.DeleteRuleSetRequest, dict]):
+                The request object. Request message for
+                RuleSetService.DeleteRuleSet.
+            name (str):
+                Required. The name of the rule set to delete. Format:
+                projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, ruleset_service_request.DeleteRuleSetRequest):
+            request = ruleset_service_request.DeleteRuleSetRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_rule_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def list_rule_sets(self,
+            request: Optional[Union[ruleset_service_request.ListRuleSetsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListRuleSetsPager:
+        r"""Lists rulesets.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_list_rule_sets():
+                # Create a client
+                client = contentwarehouse_v1.RuleSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListRuleSetsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_rule_sets(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.ListRuleSetsRequest, dict]):
+                The request object. Request message for
+                RuleSetService.ListRuleSets.
+            parent (str):
+                Required. The parent, which owns this collection of
+                document. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.rule_set_service.pagers.ListRuleSetsPager:
+                Response message for
+                RuleSetService.ListRuleSets.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, ruleset_service_request.ListRuleSetsRequest):
+            request = ruleset_service_request.ListRuleSetsRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_rule_sets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListRuleSetsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "RuleSetServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+        """
+        self.transport.close()
+
+    def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+
+
+
+
+
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+__all__ = (
+    "RuleSetServiceClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/pagers.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/pagers.py
new file mode 100644
index 000000000000..f88f7a0af1e2
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/pagers.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+
+
+class ListRuleSetsPager:
+    """A pager for iterating through ``list_rule_sets`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.contentwarehouse_v1.types.ListRuleSetsResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``rule_sets`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListRuleSets`` requests and continue to iterate
+    through the ``rule_sets`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.contentwarehouse_v1.types.ListRuleSetsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., ruleset_service_request.ListRuleSetsResponse],
+            request: ruleset_service_request.ListRuleSetsRequest,
+            response: ruleset_service_request.ListRuleSetsResponse,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.contentwarehouse_v1.types.ListRuleSetsRequest):
+                The initial request object.
+            response (google.cloud.contentwarehouse_v1.types.ListRuleSetsResponse):
+                The initial response object.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = ruleset_service_request.ListRuleSetsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[ruleset_service_request.ListRuleSetsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[rule_engine.RuleSet]:
+        for page in self.pages:
+            yield from page.rule_sets
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListRuleSetsAsyncPager:
+    """A pager for iterating through ``list_rule_sets`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.contentwarehouse_v1.types.ListRuleSetsResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``rule_sets`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListRuleSets`` requests and continue to iterate
+    through the ``rule_sets`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.contentwarehouse_v1.types.ListRuleSetsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[ruleset_service_request.ListRuleSetsResponse]],
+            request: ruleset_service_request.ListRuleSetsRequest,
+            response: ruleset_service_request.ListRuleSetsResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.contentwarehouse_v1.types.ListRuleSetsRequest):
+                The initial request object.
+            response (google.cloud.contentwarehouse_v1.types.ListRuleSetsResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = ruleset_service_request.ListRuleSetsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[ruleset_service_request.ListRuleSetsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+    def __aiter__(self) -> AsyncIterator[rule_engine.RuleSet]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.rule_sets:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/README.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/README.rst
new file mode 100644
index 000000000000..779e83f473d9
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`RuleSetServiceTransport` is the ABC for all transports.
+- public child `RuleSetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `RuleSetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseRuleSetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `RuleSetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/__init__.py
new file mode 100644
index 000000000000..5d0ae526e6ea
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/__init__.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import RuleSetServiceTransport
+from .grpc import RuleSetServiceGrpcTransport
+from .grpc_asyncio import RuleSetServiceGrpcAsyncIOTransport
+from .rest import RuleSetServiceRestTransport
+from .rest import RuleSetServiceRestInterceptor
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[RuleSetServiceTransport]]
+_transport_registry['grpc'] = RuleSetServiceGrpcTransport
+_transport_registry['grpc_asyncio'] = RuleSetServiceGrpcAsyncIOTransport
+_transport_registry['rest'] = RuleSetServiceRestTransport
+
+__all__ = (
+    'RuleSetServiceTransport',
+    'RuleSetServiceGrpcTransport',
+    'RuleSetServiceGrpcAsyncIOTransport',
+    'RuleSetServiceRestTransport',
+    'RuleSetServiceRestInterceptor',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py
new file mode 100644
index 000000000000..d82a1c8899f2
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/base.py
@@ -0,0 +1,245 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class RuleSetServiceTransport(abc.ABC):
+    """Abstract transport class for RuleSetService."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'contentwarehouse.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply audience if the credentials file passed from user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.create_rule_set: gapic_v1.method.wrap_method(
+                self.create_rule_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_rule_set: gapic_v1.method.wrap_method(
+                self.get_rule_set,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_rule_set: gapic_v1.method.wrap_method(
+                self.update_rule_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_rule_set: gapic_v1.method.wrap_method(
+                self.delete_rule_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_rule_sets: gapic_v1.method.wrap_method(
+                self.list_rule_sets,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_operation: gapic_v1.method.wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    @property
+    def create_rule_set(self) -> Callable[
+            [ruleset_service_request.CreateRuleSetRequest],
+            Union[
+                rule_engine.RuleSet,
+                Awaitable[rule_engine.RuleSet]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_rule_set(self) -> Callable[
+            [ruleset_service_request.GetRuleSetRequest],
+            Union[
+                rule_engine.RuleSet,
+                Awaitable[rule_engine.RuleSet]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def update_rule_set(self) -> Callable[
+            [ruleset_service_request.UpdateRuleSetRequest],
+            Union[
+                rule_engine.RuleSet,
+                Awaitable[rule_engine.RuleSet]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def delete_rule_set(self) -> Callable[
+            [ruleset_service_request.DeleteRuleSetRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_rule_sets(self) -> Callable[
+            [ruleset_service_request.ListRuleSetsRequest],
+            Union[
+                ruleset_service_request.ListRuleSetsResponse,
+                Awaitable[ruleset_service_request.ListRuleSetsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[
+        [operations_pb2.GetOperationRequest],
+        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'RuleSetServiceTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py
new file mode 100644
index 000000000000..de5c27e26e00
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc.py
@@ -0,0 +1,397 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth                         # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import RuleSetServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class RuleSetServiceGrpcTransport(RuleSetServiceTransport):
+    """gRPC backend transport for RuleSetService.
+
+    Service to manage customer specific RuleSets.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, grpc.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_rule_set(self) -> Callable[
+            [ruleset_service_request.CreateRuleSetRequest],
+            rule_engine.RuleSet]:
+        r"""Return a callable for the create rule set method over gRPC.
+
+        Creates a ruleset.
+
+        Returns:
+            Callable[[~.CreateRuleSetRequest],
+                    ~.RuleSet]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_rule_set' not in self._stubs:
+            self._stubs['create_rule_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/CreateRuleSet',
+                request_serializer=ruleset_service_request.CreateRuleSetRequest.serialize,
+                response_deserializer=rule_engine.RuleSet.deserialize,
+            )
+        return self._stubs['create_rule_set']
+
+    @property
+    def get_rule_set(self) -> Callable[
+            [ruleset_service_request.GetRuleSetRequest],
+            rule_engine.RuleSet]:
+        r"""Return a callable for the get rule set method over gRPC.
+
+        Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
+
+        Returns:
+            Callable[[~.GetRuleSetRequest],
+                    ~.RuleSet]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_rule_set' not in self._stubs:
+            self._stubs['get_rule_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/GetRuleSet',
+                request_serializer=ruleset_service_request.GetRuleSetRequest.serialize,
+                response_deserializer=rule_engine.RuleSet.deserialize,
+            )
+        return self._stubs['get_rule_set']
+
+    @property
+    def update_rule_set(self) -> Callable[
+            [ruleset_service_request.UpdateRuleSetRequest],
+            rule_engine.RuleSet]:
+        r"""Return a callable for the update rule set method over gRPC.
+
+        Updates a ruleset. Returns INVALID_ARGUMENT if the name of the
+        ruleset is non-empty and does not equal the existing name.
+
+        Returns:
+            Callable[[~.UpdateRuleSetRequest],
+                    ~.RuleSet]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_rule_set' not in self._stubs:
+            self._stubs['update_rule_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/UpdateRuleSet',
+                request_serializer=ruleset_service_request.UpdateRuleSetRequest.serialize,
+                response_deserializer=rule_engine.RuleSet.deserialize,
+            )
+        return self._stubs['update_rule_set']
+
+    @property
+    def delete_rule_set(self) -> Callable[
+            [ruleset_service_request.DeleteRuleSetRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete rule set method over gRPC.
+
+        Deletes a ruleset. Returns NOT_FOUND if the document does not
+        exist.
+
+        Returns:
+            Callable[[~.DeleteRuleSetRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_rule_set' not in self._stubs:
+            self._stubs['delete_rule_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/DeleteRuleSet',
+                request_serializer=ruleset_service_request.DeleteRuleSetRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_rule_set']
+
+    @property
+    def list_rule_sets(self) -> Callable[
+            [ruleset_service_request.ListRuleSetsRequest],
+            ruleset_service_request.ListRuleSetsResponse]:
+        r"""Return a callable for the list rule sets method over gRPC.
+
+        Lists rulesets.
+
+        Returns:
+            Callable[[~.ListRuleSetsRequest],
+                    ~.ListRuleSetsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_rule_sets' not in self._stubs:
+            self._stubs['list_rule_sets'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/ListRuleSets',
+                request_serializer=ruleset_service_request.ListRuleSetsRequest.serialize,
+                response_deserializer=ruleset_service_request.ListRuleSetsResponse.deserialize,
+            )
+        return self._stubs['list_rule_sets']
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'RuleSetServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..1bc0158e0fc3
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/grpc_asyncio.py
@@ -0,0 +1,461 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc                        # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import RuleSetServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import RuleSetServiceGrpcTransport
+
+
+class RuleSetServiceGrpcAsyncIOTransport(RuleSetServiceTransport):
+    """gRPC AsyncIO backend transport for RuleSetService.
+
+    Service to manage customer specific RuleSets.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def create_rule_set(self) -> Callable[
+            [ruleset_service_request.CreateRuleSetRequest],
+            Awaitable[rule_engine.RuleSet]]:
+        r"""Return a callable for the create rule set method over gRPC.
+
+        Creates a ruleset.
+
+        Returns:
+            Callable[[~.CreateRuleSetRequest],
+                    Awaitable[~.RuleSet]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs keyed by method name, so repeated
+        # property access reuses one bound unary-unary callable.
+        if 'create_rule_set' not in self._stubs:
+            self._stubs['create_rule_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/CreateRuleSet',
+                request_serializer=ruleset_service_request.CreateRuleSetRequest.serialize,
+                response_deserializer=rule_engine.RuleSet.deserialize,
+            )
+        return self._stubs['create_rule_set']
+
+    @property
+    def get_rule_set(self) -> Callable[
+            [ruleset_service_request.GetRuleSetRequest],
+            Awaitable[rule_engine.RuleSet]]:
+        r"""Return a callable for the get rule set method over gRPC.
+
+        Gets a ruleset. Returns NOT_FOUND if the ruleset does not exist.
+
+        Returns:
+            Callable[[~.GetRuleSetRequest],
+                    Awaitable[~.RuleSet]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # Cached in self._stubs so the channel stub is created only once.
+        if 'get_rule_set' not in self._stubs:
+            self._stubs['get_rule_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/GetRuleSet',
+                request_serializer=ruleset_service_request.GetRuleSetRequest.serialize,
+                response_deserializer=rule_engine.RuleSet.deserialize,
+            )
+        return self._stubs['get_rule_set']
+
+    @property
+    def update_rule_set(self) -> Callable[
+            [ruleset_service_request.UpdateRuleSetRequest],
+            Awaitable[rule_engine.RuleSet]]:
+        r"""Return a callable for the update rule set method over gRPC.
+
+        Updates a ruleset. Returns INVALID_ARGUMENT if the name of the
+        ruleset is non-empty and does not equal the existing name.
+
+        Returns:
+            Callable[[~.UpdateRuleSetRequest],
+                    Awaitable[~.RuleSet]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # Cached in self._stubs so the channel stub is created only once.
+        if 'update_rule_set' not in self._stubs:
+            self._stubs['update_rule_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/UpdateRuleSet',
+                request_serializer=ruleset_service_request.UpdateRuleSetRequest.serialize,
+                response_deserializer=rule_engine.RuleSet.deserialize,
+            )
+        return self._stubs['update_rule_set']
+
+    @property
+    def delete_rule_set(self) -> Callable[
+            [ruleset_service_request.DeleteRuleSetRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete rule set method over gRPC.
+
+        Deletes a ruleset. Returns NOT_FOUND if the document does not
+        exist.
+
+        Returns:
+            Callable[[~.DeleteRuleSetRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # NOTE(review): the upstream API doc above says "document"; it
+        # presumably means the ruleset being deleted — confirm against proto.
+        # The response is google.protobuf.Empty, hence FromString (a plain
+        # protobuf message, not a proto-plus wrapper with .deserialize).
+        if 'delete_rule_set' not in self._stubs:
+            self._stubs['delete_rule_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/DeleteRuleSet',
+                request_serializer=ruleset_service_request.DeleteRuleSetRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_rule_set']
+
+    @property
+    def list_rule_sets(self) -> Callable[
+            [ruleset_service_request.ListRuleSetsRequest],
+            Awaitable[ruleset_service_request.ListRuleSetsResponse]]:
+        r"""Return a callable for the list rule sets method over gRPC.
+
+        Lists rulesets.
+
+        Returns:
+            Callable[[~.ListRuleSetsRequest],
+                    Awaitable[~.ListRuleSetsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # Cached in self._stubs so the channel stub is created only once.
+        if 'list_rule_sets' not in self._stubs:
+            self._stubs['list_rule_sets'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.RuleSetService/ListRuleSets',
+                request_serializer=ruleset_service_request.ListRuleSetsRequest.serialize,
+                response_deserializer=ruleset_service_request.ListRuleSetsResponse.deserialize,
+            )
+        return self._stubs['list_rule_sets']
+
+    def _prep_wrapped_messages(self, client_info):
+        """ Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+        # Keys are the bound stub callables themselves; values wrap each stub
+        # with its default timeout and, where configured, a retry policy.
+        # Only GetRuleSet and ListRuleSets are retried, and only on
+        # ServiceUnavailable (exponential backoff 1s..10s, x1.3, 60s deadline).
+        self._wrapped_methods = {
+            self.create_rule_set: self._wrap_method(
+                self.create_rule_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_rule_set: self._wrap_method(
+                self.get_rule_set,
+                default_retry=retries.AsyncRetry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_rule_set: self._wrap_method(
+                self.update_rule_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_rule_set: self._wrap_method(
+                self.delete_rule_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_rule_sets: self._wrap_method(
+                self.list_rule_sets,
+                default_retry=retries.AsyncRetry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            # Long-running Operations mixin RPC: no default timeout.
+            self.get_operation: self._wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def _wrap_method(self, func, *args, **kwargs):
+        # Older google-api-core releases do not accept a ``kind`` parameter on
+        # wrap_method; only pass it when supported (feature-detected via
+        # inspect.signature in __init__ and stored in self._wrap_with_kind).
+        if self._wrap_with_kind:  # pragma: NO COVER
+            kwargs["kind"] = self.kind
+        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
+    def close(self):
+        # Close the underlying gRPC channel. Returns whatever
+        # aio.Channel.close() produces — presumably an awaitable the caller
+        # is expected to await (confirm against grpc.aio docs).
+        return self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        # Transport identifier; used by _wrap_method when the installed
+        # api-core supports the ``kind`` argument.
+        return "grpc_asyncio"
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+
+        Mixin RPC from the google.longrunning.Operations service; cached in
+        ``self._stubs`` like the service-specific stubs.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+
+# Explicit public API of this module.
+__all__ = (
+    'RuleSetServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py
new file mode 100644
index 000000000000..eb4345f12f5b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py
@@ -0,0 +1,779 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+import json  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+
+from requests import __version__ as requests_version
+import dataclasses
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+from .rest_base import _BaseRuleSetServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+# Older google-api-core releases lack gapic_v1.method._MethodDefault; fall
+# back to a plain ``object`` member in the Union in that case.
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+
+# Client info advertised in the user-agent; REST transports report the
+# installed ``requests`` version in place of a gRPC version.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=f"requests@{requests_version}",
+)
+
+
+class RuleSetServiceRestInterceptor:
+    """Interceptor for RuleSetService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the RuleSetServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomRuleSetServiceInterceptor(RuleSetServiceRestInterceptor):
+            def pre_create_rule_set(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_rule_set(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_delete_rule_set(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_rule_set(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_rule_set(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_list_rule_sets(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_rule_sets(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_update_rule_set(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_rule_set(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = RuleSetServiceRestTransport(interceptor=MyCustomRuleSetServiceInterceptor())
+        client = RuleSetServiceClient(transport=transport)
+
+
+    """
+    # Every hook is a no-op default; subclasses override the ones they need.
+    def pre_create_rule_set(self, request: ruleset_service_request.CreateRuleSetRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[ruleset_service_request.CreateRuleSetRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_rule_set
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the RuleSetService server.
+        """
+        return request, metadata
+
+    def post_create_rule_set(self, response: rule_engine.RuleSet) -> rule_engine.RuleSet:
+        """Post-rpc interceptor for create_rule_set
+
+        Override in a subclass to manipulate the response
+        after it is returned by the RuleSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+    # NOTE: DeleteRuleSet returns google.protobuf.Empty, so only a
+    # pre-interceptor hook exists for it — there is no response to post-process.
+    def pre_delete_rule_set(self, request: ruleset_service_request.DeleteRuleSetRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[ruleset_service_request.DeleteRuleSetRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_rule_set
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the RuleSetService server.
+        """
+        return request, metadata
+
+    def pre_get_rule_set(self, request: ruleset_service_request.GetRuleSetRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[ruleset_service_request.GetRuleSetRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_rule_set
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the RuleSetService server.
+        """
+        return request, metadata
+
+    def post_get_rule_set(self, response: rule_engine.RuleSet) -> rule_engine.RuleSet:
+        """Post-rpc interceptor for get_rule_set
+
+        Override in a subclass to manipulate the response
+        after it is returned by the RuleSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_list_rule_sets(self, request: ruleset_service_request.ListRuleSetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[ruleset_service_request.ListRuleSetsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_rule_sets
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the RuleSetService server.
+        """
+        return request, metadata
+
+    def post_list_rule_sets(self, response: ruleset_service_request.ListRuleSetsResponse) -> ruleset_service_request.ListRuleSetsResponse:
+        """Post-rpc interceptor for list_rule_sets
+
+        Override in a subclass to manipulate the response
+        after it is returned by the RuleSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_update_rule_set(self, request: ruleset_service_request.UpdateRuleSetRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[ruleset_service_request.UpdateRuleSetRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_rule_set
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the RuleSetService server.
+        """
+        return request, metadata
+
+    def post_update_rule_set(self, response: rule_engine.RuleSet) -> rule_engine.RuleSet:
+        """Post-rpc interceptor for update_rule_set
+
+        Override in a subclass to manipulate the response
+        after it is returned by the RuleSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+    # Hooks for the google.longrunning.Operations mixin RPC.
+    def pre_get_operation(
+        self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the RuleSetService server.
+        """
+        return request, metadata
+
+    def post_get_operation(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the RuleSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class RuleSetServiceRestStub:
+    # Lightweight state holder mixed into each per-RPC stub class below.
+    _session: AuthorizedSession  # authorized HTTP session shared with the transport
+    _host: str  # service host prefix used when building request URLs
+    _interceptor: RuleSetServiceRestInterceptor  # pre/post RPC hooks
+
+
+class RuleSetServiceRestTransport(_BaseRuleSetServiceRestTransport):
+    """REST backend synchronous transport for RuleSetService.
+
+    Service to manage customer specific RuleSets.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[RuleSetServiceRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        # NOTE(review): ``credentials_file``, ``scopes`` and
+        # ``quota_project_id`` are accepted but not forwarded to the base
+        # constructor in this visible body — presumably resolved elsewhere
+        # (see the TODOs above); confirm before relying on them.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        # HTTP session that attaches the resolved credentials to each request.
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        # Default to the no-op base interceptor when none is supplied.
+        self._interceptor = interceptor or RuleSetServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateRuleSet(_BaseRuleSetServiceRestTransport._BaseCreateRuleSet, RuleSetServiceRestStub):
+        def __hash__(self):
+            # Stable hash so stub instances can be used as dict/set keys —
+            # presumably for the transport's wrapped-method cache; confirm.
+            return hash("RuleSetServiceRestTransport.CreateRuleSet")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch via the HTTP verb chosen by transcoding (e.g.
+            # session.post / session.get), building the URL from host + uri.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: ruleset_service_request.CreateRuleSetRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> rule_engine.RuleSet:
+            r"""Call the create rule set method over HTTP.
+
+            Args:
+                request (~.ruleset_service_request.CreateRuleSetRequest):
+                    The request object. Request message for
+                RuleSetService.CreateRuleSet.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.rule_engine.RuleSet:
+                    Represents a set of rules from a
+                single customer.
+
+            """
+
+            # NOTE(review): ``retry`` is accepted for interface parity but is
+            # not referenced in this method body; retries are presumably
+            # applied by the gapic method wrapper one layer up.
+            http_options = _BaseRuleSetServiceRestTransport._BaseCreateRuleSet._get_http_options()
+            request, metadata = self._interceptor.pre_create_rule_set(request, metadata)
+            transcoded_request = _BaseRuleSetServiceRestTransport._BaseCreateRuleSet._get_transcoded_request(http_options, request)
+
+            body = _BaseRuleSetServiceRestTransport._BaseCreateRuleSet._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseRuleSetServiceRestTransport._BaseCreateRuleSet._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = RuleSetServiceRestTransport._CreateRuleSet._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            # Parse the JSON payload into the proto-plus wrapper's underlying
+            # protobuf message, then run the post interceptor hook.
+            resp = rule_engine.RuleSet()
+            pb_resp = rule_engine.RuleSet.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_create_rule_set(resp)
+            return resp
+
+    class _DeleteRuleSet(_BaseRuleSetServiceRestTransport._BaseDeleteRuleSet, RuleSetServiceRestStub):
+        def __hash__(self):
+            # Stable hash so stub instances can be used as dict/set keys —
+            # presumably for the transport's wrapped-method cache; confirm.
+            return hash("RuleSetServiceRestTransport.DeleteRuleSet")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # ``body`` is accepted for signature parity but not sent:
+            # DeleteRuleSet carries no request body.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: ruleset_service_request.DeleteRuleSetRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ):
+            r"""Call the delete rule set method over HTTP.
+
+            Returns nothing: the RPC's response is google.protobuf.Empty,
+            so no payload is parsed and the method implicitly returns None.
+
+            Args:
+                request (~.ruleset_service_request.DeleteRuleSetRequest):
+                    The request object. Request message for
+                RuleSetService.DeleteRuleSet.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+            """
+
+            # NOTE(review): ``retry`` is accepted but not referenced in this
+            # method body; retries are presumably applied one layer up.
+            http_options = _BaseRuleSetServiceRestTransport._BaseDeleteRuleSet._get_http_options()
+            request, metadata = self._interceptor.pre_delete_rule_set(request, metadata)
+            transcoded_request = _BaseRuleSetServiceRestTransport._BaseDeleteRuleSet._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseRuleSetServiceRestTransport._BaseDeleteRuleSet._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = RuleSetServiceRestTransport._DeleteRuleSet._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+    class _GetRuleSet(_BaseRuleSetServiceRestTransport._BaseGetRuleSet, RuleSetServiceRestStub):
+        def __hash__(self):
+            # Stable hash so stub instances can be used as dict/set keys —
+            # presumably for the transport's wrapped-method cache; confirm.
+            return hash("RuleSetServiceRestTransport.GetRuleSet")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # ``body`` is accepted for signature parity but not sent:
+            # GetRuleSet carries no request body.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: ruleset_service_request.GetRuleSetRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> rule_engine.RuleSet:
+            r"""Call the get rule set method over HTTP.
+
+            Args:
+                request (~.ruleset_service_request.GetRuleSetRequest):
+                    The request object. Request message for
+                RuleSetService.GetRuleSet.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.rule_engine.RuleSet:
+                    Represents a set of rules from a
+                single customer.
+
+            """
+
+            # NOTE(review): ``retry`` is accepted but not referenced in this
+            # method body; retries are presumably applied one layer up.
+            http_options = _BaseRuleSetServiceRestTransport._BaseGetRuleSet._get_http_options()
+            request, metadata = self._interceptor.pre_get_rule_set(request, metadata)
+            transcoded_request = _BaseRuleSetServiceRestTransport._BaseGetRuleSet._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseRuleSetServiceRestTransport._BaseGetRuleSet._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = RuleSetServiceRestTransport._GetRuleSet._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            # Parse the JSON payload into the proto-plus wrapper's underlying
+            # protobuf message, then run the post interceptor hook.
+            resp = rule_engine.RuleSet()
+            pb_resp = rule_engine.RuleSet.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_get_rule_set(resp)
+            return resp
+
+    class _ListRuleSets(_BaseRuleSetServiceRestTransport._BaseListRuleSets, RuleSetServiceRestStub):
+        # REST stub for RuleSetService.ListRuleSets (HTTP GET, no request body).
+        def __hash__(self):
+            return hash("RuleSetServiceRestTransport.ListRuleSets")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch the HTTP call through the authorized session; the verb
+            # (lower-case, e.g. 'get') comes from the transcoded request.
+            # `body` is accepted for signature parity but unused for GET.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: ruleset_service_request.ListRuleSetsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> ruleset_service_request.ListRuleSetsResponse:
+            r"""Call the list rule sets method over HTTP.
+
+            Args:
+                request (~.ruleset_service_request.ListRuleSetsRequest):
+                    The request object. Request message for
+                RuleSetService.ListRuleSets.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.ruleset_service_request.ListRuleSetsResponse:
+                    Response message for
+                RuleSetService.ListRuleSets.
+
+            """
+
+            # NOTE(review): `retry` is accepted for interface parity but is not
+            # applied anywhere in this body.
+            http_options = _BaseRuleSetServiceRestTransport._BaseListRuleSets._get_http_options()
+            request, metadata = self._interceptor.pre_list_rule_sets(request, metadata)
+            transcoded_request = _BaseRuleSetServiceRestTransport._BaseListRuleSets._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseRuleSetServiceRestTransport._BaseListRuleSets._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = RuleSetServiceRestTransport._ListRuleSets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = ruleset_service_request.ListRuleSetsResponse()
+            pb_resp = ruleset_service_request.ListRuleSetsResponse.pb(resp)
+
+            # Parse the JSON payload into the protobuf, then let the
+            # interceptor post-process the result before returning it.
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_list_rule_sets(resp)
+            return resp
+
+    class _UpdateRuleSet(_BaseRuleSetServiceRestTransport._BaseUpdateRuleSet, RuleSetServiceRestStub):
+        # REST stub for RuleSetService.UpdateRuleSet (HTTP PATCH with a JSON body).
+        def __hash__(self):
+            return hash("RuleSetServiceRestTransport.UpdateRuleSet")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch the HTTP call through the authorized session; unlike the
+            # GET stubs this one forwards `body` as the JSON request payload.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: ruleset_service_request.UpdateRuleSetRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> rule_engine.RuleSet:
+            r"""Call the update rule set method over HTTP.
+
+            Args:
+                request (~.ruleset_service_request.UpdateRuleSetRequest):
+                    The request object. Request message for
+                RuleSetService.UpdateRuleSet.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.rule_engine.RuleSet:
+                    Represents a set of rules from a
+                single customer.
+
+            """
+
+            # NOTE(review): `retry` is accepted for interface parity but is not
+            # applied anywhere in this body.
+            http_options = _BaseRuleSetServiceRestTransport._BaseUpdateRuleSet._get_http_options()
+            request, metadata = self._interceptor.pre_update_rule_set(request, metadata)
+            transcoded_request = _BaseRuleSetServiceRestTransport._BaseUpdateRuleSet._get_transcoded_request(http_options, request)
+
+            # Serialize the transcoded request body to JSON for the PATCH call.
+            body = _BaseRuleSetServiceRestTransport._BaseUpdateRuleSet._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseRuleSetServiceRestTransport._BaseUpdateRuleSet._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = RuleSetServiceRestTransport._UpdateRuleSet._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = rule_engine.RuleSet()
+            pb_resp = rule_engine.RuleSet.pb(resp)
+
+            # Parse the JSON payload into the protobuf, then let the
+            # interceptor post-process the result before returning it.
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_update_rule_set(resp)
+            return resp
+
+    @property
+    def create_rule_set(self) -> Callable[
+            [ruleset_service_request.CreateRuleSetRequest],
+            rule_engine.RuleSet]:
+        # Returns a fresh callable stub that invokes CreateRuleSet over REST.
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._CreateRuleSet(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def delete_rule_set(self) -> Callable[
+            [ruleset_service_request.DeleteRuleSetRequest],
+            empty_pb2.Empty]:
+        # Returns a fresh callable stub that invokes DeleteRuleSet over REST.
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._DeleteRuleSet(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_rule_set(self) -> Callable[
+            [ruleset_service_request.GetRuleSetRequest],
+            rule_engine.RuleSet]:
+        # Returns a fresh callable stub that invokes GetRuleSet over REST.
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._GetRuleSet(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def list_rule_sets(self) -> Callable[
+            [ruleset_service_request.ListRuleSetsRequest],
+            ruleset_service_request.ListRuleSetsResponse]:
+        # Returns a fresh callable stub that invokes ListRuleSets over REST.
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._ListRuleSets(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def update_rule_set(self) -> Callable[
+            [ruleset_service_request.UpdateRuleSetRequest],
+            rule_engine.RuleSet]:
+        # Returns a fresh callable stub that invokes UpdateRuleSet over REST.
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._UpdateRuleSet(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_operation(self):
+        # Returns a fresh callable stub for the google.longrunning GetOperation
+        # mixin method over REST.
+        return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
+
+    class _GetOperation(_BaseRuleSetServiceRestTransport._BaseGetOperation, RuleSetServiceRestStub):
+        # REST stub for the google.longrunning GetOperation mixin (HTTP GET).
+        def __hash__(self):
+            return hash("RuleSetServiceRestTransport.GetOperation")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Dispatch the HTTP call through the authorized session; the verb
+            # (lower-case, e.g. 'get') comes from the transcoded request.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+            request: operations_pb2.GetOperationRequest, *,
+            retry: OptionalRetry=gapic_v1.method.DEFAULT,
+            timeout: Optional[float]=None,
+            metadata: Sequence[Tuple[str, str]]=(),
+            ) -> operations_pb2.Operation:
+
+            r"""Call the get operation method over HTTP.
+
+            Args:
+                request (operations_pb2.GetOperationRequest):
+                    The request object for GetOperation method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                operations_pb2.Operation: Response from GetOperation method.
+            """
+
+            # NOTE(review): `retry` is accepted for interface parity but is not
+            # applied anywhere in this body.
+            http_options = _BaseRuleSetServiceRestTransport._BaseGetOperation._get_http_options()
+            request, metadata = self._interceptor.pre_get_operation(request, metadata)
+            transcoded_request = _BaseRuleSetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseRuleSetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = RuleSetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Decode and parse directly into the well-known Operation proto,
+            # then let the interceptor post-process the result.
+            content = response.content.decode("utf-8")
+            resp = operations_pb2.Operation()
+            resp = json_format.Parse(content, resp)
+            resp = self._interceptor.post_get_operation(resp)
+            return resp
+
+    @property
+    def kind(self) -> str:
+        # Transport kind identifier distinguishing this REST transport from
+        # the gRPC transports.
+        return "rest"
+
+    def close(self):
+        # Close the underlying HTTP session held by this transport.
+        self._session.close()
+
+
+# Public API of this module: only the concrete REST transport is exported.
+__all__=(
+    'RuleSetServiceRestTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest_base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest_base.py
new file mode 100644
index 000000000000..b224f78f3370
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest_base.py
@@ -0,0 +1,324 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json  # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import RuleSetServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+class _BaseRuleSetServiceRestTransport(RuleSetServiceTransport):
+    """Base REST backend transport for RuleSetService.
+
+    Note: This class is not meant to be used directly. Use its sync and
+    async sub-classes instead.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        # Prepend the url_scheme only when the caller did not already include
+        # an explicit scheme in `host`.
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseCreateRuleSet:
+        # Mixin providing HTTP options, request transcoding, body and query
+        # param serialization for the CreateRuleSet RPC.
+        def __hash__(self):  # pragma: NO COVER
+            # NOTE(review): returns (not raises) NotImplementedError; concrete
+            # stub subclasses override __hash__.
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            # Defaults for required query-string fields that are absent from
+            # the serialized message (empty for this RPC).
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*/locations/*}/ruleSets',
+                'body': 'rule_set',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            # Map the proto request onto the matching HTTP rule (uri/method/body).
+            pb_request = ruleset_service_request.CreateRuleSetRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseRuleSetServiceRestTransport._BaseCreateRuleSet._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseDeleteRuleSet:
+        # Mixin providing HTTP options, request transcoding and query param
+        # serialization for the DeleteRuleSet RPC (no request body).
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'delete',
+                'uri': '/v1/{name=projects/*/locations/*/ruleSets/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = ruleset_service_request.DeleteRuleSetRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseRuleSetServiceRestTransport._BaseDeleteRuleSet._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseGetRuleSet:
+        # Mixin providing HTTP options, request transcoding and query param
+        # serialization for the GetRuleSet RPC (no request body).
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{name=projects/*/locations/*/ruleSets/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = ruleset_service_request.GetRuleSetRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseRuleSetServiceRestTransport._BaseGetRuleSet._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseListRuleSets:
+        # Mixin providing HTTP options, request transcoding and query param
+        # serialization for the ListRuleSets RPC (no request body).
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{parent=projects/*/locations/*}/ruleSets',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = ruleset_service_request.ListRuleSetsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseRuleSetServiceRestTransport._BaseListRuleSets._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseUpdateRuleSet:
+        # Mixin providing HTTP options, request transcoding, body and query
+        # param serialization for the UpdateRuleSet RPC (PATCH, body='*').
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'patch',
+                'uri': '/v1/{name=projects/*/locations/*/ruleSets/*}',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = ruleset_service_request.UpdateRuleSetRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseRuleSetServiceRestTransport._BaseUpdateRuleSet._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseGetOperation:
+        # Mixin for the google.longrunning GetOperation mixin method; the
+        # request here is a plain operations_pb2 message, hence the
+        # MessageToDict-based transcoding below.
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'get',
+                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            request_kwargs = json_format.MessageToDict(request)
+            transcoded_request = path_template.transcode(
+                http_options, **request_kwargs)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            return query_params
+
+
+# Public API of this module: only the shared base REST transport is exported.
+__all__=(
+    '_BaseRuleSetServiceRestTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/__init__.py
new file mode 100644
index 000000000000..5a84ed6ee9b6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import SynonymSetServiceClient
+from .async_client import SynonymSetServiceAsyncClient
+
+__all__ = (
+    'SynonymSetServiceClient',
+    'SynonymSetServiceAsyncClient',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py
new file mode 100644
index 000000000000..e958f31b0c49
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/async_client.py
@@ -0,0 +1,864 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.oauth2 import service_account              # type: ignore
+
+
try:
    # Newer google-api-core releases expose the method-default sentinel type,
    # letting us type the retry parameter precisely.
    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    # Older google-api-core: `gapic_v1.method._MethodDefault` is unavailable,
    # so fall back to `object` for the sentinel's slot in the union.
    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.synonym_set_service import pagers
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from .transports.base import SynonymSetServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import SynonymSetServiceGrpcAsyncIOTransport
+from .client import SynonymSetServiceClient
+
+
+class SynonymSetServiceAsyncClient:
+    """A Service that manage/custom customer specified SynonymSets."""
+
+    _client: SynonymSetServiceClient
+
+    # Copy defaults from the synchronous client for use here.
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = SynonymSetServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = SynonymSetServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = SynonymSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = SynonymSetServiceClient._DEFAULT_UNIVERSE
+
+    location_path = staticmethod(SynonymSetServiceClient.location_path)
+    parse_location_path = staticmethod(SynonymSetServiceClient.parse_location_path)
+    synonym_set_path = staticmethod(SynonymSetServiceClient.synonym_set_path)
+    parse_synonym_set_path = staticmethod(SynonymSetServiceClient.parse_synonym_set_path)
+    common_billing_account_path = staticmethod(SynonymSetServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(SynonymSetServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(SynonymSetServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(SynonymSetServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(SynonymSetServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(SynonymSetServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(SynonymSetServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(SynonymSetServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(SynonymSetServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(SynonymSetServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SynonymSetServiceAsyncClient: The constructed client.
+        """
+        return SynonymSetServiceClient.from_service_account_info.__func__(SynonymSetServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SynonymSetServiceAsyncClient: The constructed client.
+        """
+        return SynonymSetServiceClient.from_service_account_file.__func__(SynonymSetServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        # Delegate entirely to the synchronous client's implementation.
        return SynonymSetServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> SynonymSetServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SynonymSetServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = SynonymSetServiceClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, SynonymSetServiceTransport, Callable[..., SynonymSetServiceTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the synonym set service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,SynonymSetServiceTransport,Callable[..., SynonymSetServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the SynonymSetServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = SynonymSetServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+
+        )
+
+    async def create_synonym_set(self,
+            request: Optional[Union[synonymset_service_request.CreateSynonymSetRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            synonym_set: Optional[synonymset.SynonymSet] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> synonymset.SynonymSet:
+        r"""Creates a SynonymSet for a single context. Throws an
+        ALREADY_EXISTS exception if a synonymset already exists for the
+        context.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_create_synonym_set():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.CreateSynonymSetRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = await client.create_synonym_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.CreateSynonymSetRequest, dict]]):
+                The request object. Request message for
+                SynonymSetService.CreateSynonymSet.
+            parent (:class:`str`):
+                Required. The parent name. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            synonym_set (:class:`google.cloud.contentwarehouse_v1.types.SynonymSet`):
+                Required. The synonymSet to be
+                created for a context
+
+                This corresponds to the ``synonym_set`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.SynonymSet:
+                Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, synonym_set])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.CreateSynonymSetRequest):
+            request = synonymset_service_request.CreateSynonymSetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if synonym_set is not None:
+            request.synonym_set = synonym_set
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_synonym_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_synonym_set(self,
+            request: Optional[Union[synonymset_service_request.GetSynonymSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> synonymset.SynonymSet:
+        r"""Gets a SynonymSet for a particular context. Throws a NOT_FOUND
+        exception if the Synonymset does not exist
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_get_synonym_set():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.GetSynonymSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.get_synonym_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.GetSynonymSetRequest, dict]]):
+                The request object. Request message for
+                SynonymSetService.GetSynonymSet. Will
+                return synonymSet for a certain context.
+            name (:class:`str`):
+                Required. The name of the synonymSet to retrieve Format:
+                projects/{project_number}/locations/{location}/synonymSets/{context}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.SynonymSet:
+                Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.GetSynonymSetRequest):
+            request = synonymset_service_request.GetSynonymSetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_synonym_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_synonym_set(self,
+            request: Optional[Union[synonymset_service_request.UpdateSynonymSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            synonym_set: Optional[synonymset.SynonymSet] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> synonymset.SynonymSet:
+        r"""Remove the existing SynonymSet for the context and replaces it
+        with a new one. Throws a NOT_FOUND exception if the SynonymSet
+        is not found.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_update_synonym_set():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.UpdateSynonymSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.update_synonym_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.UpdateSynonymSetRequest, dict]]):
+                The request object. Request message for
+                SynonymSetService.UpdateSynonymSet.
+                Removes the SynonymSet for the specified
+                context and replaces it with the
+                SynonymSet in this request.
+            name (:class:`str`):
+                Required. The name of the synonymSet to update Format:
+                projects/{project_number}/locations/{location}/synonymSets/{context}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            synonym_set (:class:`google.cloud.contentwarehouse_v1.types.SynonymSet`):
+                Required. The synonymSet to be
+                updated for the customer
+
+                This corresponds to the ``synonym_set`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.SynonymSet:
+                Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, synonym_set])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.UpdateSynonymSetRequest):
+            request = synonymset_service_request.UpdateSynonymSetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+        if synonym_set is not None:
+            request.synonym_set = synonym_set
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.update_synonym_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_synonym_set(self,
+            request: Optional[Union[synonymset_service_request.DeleteSynonymSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a SynonymSet for a given context. Throws a NOT_FOUND
+        exception if the SynonymSet is not found.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_delete_synonym_set():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteSynonymSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.delete_synonym_set(request=request)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.DeleteSynonymSetRequest, dict]]):
+                The request object. Request message for
+                SynonymSetService.DeleteSynonymSet.
+            name (:class:`str`):
+                Required. The name of the synonymSet to delete Format:
+                projects/{project_number}/locations/{location}/synonymSets/{context}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.DeleteSynonymSetRequest):
+            request = synonymset_service_request.DeleteSynonymSetRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_synonym_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def list_synonym_sets(self,
+            request: Optional[Union[synonymset_service_request.ListSynonymSetsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListSynonymSetsAsyncPager:
+        r"""Returns all SynonymSets (for all contexts) for the
+        specified location.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            async def sample_list_synonym_sets():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListSynonymSetsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_synonym_sets(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.contentwarehouse_v1.types.ListSynonymSetsRequest, dict]]):
+                The request object. Request message for
+                SynonymSetService.ListSynonymSets. Will
+                return all synonymSets belonging to the
+                customer project.
+            parent (:class:`str`):
+                Required. The parent name. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.synonym_set_service.pagers.ListSynonymSetsAsyncPager:
+                Response message for
+                SynonymSetService.ListSynonymSets.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        Raises:
+            ValueError: If both ``request`` and the flattened ``parent``
+                argument are provided.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.ListSynonymSetsRequest):
+            request = synonymset_service_request.ListSynonymSetsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_synonym_sets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here, so the backend can route the request based on
+        # the request's ``parent`` field.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method. Additional pages are resolved
+        # automatically as the pager is iterated (see the docstring above).
+        response = pagers.ListSynonymSetsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def __aenter__(self) -> "SynonymSetServiceAsyncClient":
+        """Enter the async context manager; returns the client itself."""
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        """Exit the async context manager, closing the underlying transport."""
+        await self.transport.close()
+
+# Default client info, stamped with this package's version; used to build the
+# user-agent metadata sent with API requests.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+# Public surface of this module.
+__all__ = (
+    "SynonymSetServiceAsyncClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py
new file mode 100644
index 000000000000..ceaa84305dc5
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py
@@ -0,0 +1,1227 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+import warnings
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials             # type: ignore
+from google.auth.transport import mtls                            # type: ignore
+from google.auth.transport.grpc import SslCredentials             # type: ignore
+from google.auth.exceptions import MutualTLSChannelError          # type: ignore
+from google.oauth2 import service_account                         # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.services.synonym_set_service import pagers
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from .transports.base import SynonymSetServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import SynonymSetServiceGrpcTransport
+from .transports.grpc_asyncio import SynonymSetServiceGrpcAsyncIOTransport
+from .transports.rest import SynonymSetServiceRestTransport
+
+
+class SynonymSetServiceClientMeta(type):
+    """Metaclass for the SynonymSetService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[SynonymSetServiceTransport]]
+    _transport_registry["grpc"] = SynonymSetServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = SynonymSetServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = SynonymSetServiceRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[SynonymSetServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class SynonymSetServiceClient(metaclass=SynonymSetServiceClientMeta):
+    """A Service that manage/custom customer specified SynonymSets."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "contentwarehouse.googleapis.com"
+    # `__func__` unwraps the staticmethod so it can be called while the class
+    # body is still executing (the class object does not exist yet).
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    # Endpoint template parameterized on the universe domain (see
+    # `_get_api_endpoint`), and the default universe it is formatted with.
+    _DEFAULT_ENDPOINT_TEMPLATE = "contentwarehouse.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SynonymSetServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SynonymSetServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> SynonymSetServiceTransport:
+        """Returns the transport used by the client instance.
+
+        The transport is stored in ``self._transport`` during client
+        construction and shared for the lifetime of the client.
+
+        Returns:
+            SynonymSetServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def location_path(project: str,location: str,) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_location_path(path: str) -> Dict[str,str]:
+        """Parses a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def synonym_set_path(project: str,location: str,context: str,) -> str:
+        """Returns a fully-qualified synonym_set string."""
+        return "projects/{project}/locations/{location}/synonymSets/{context}".format(project=project, location=location, context=context, )
+
+    @staticmethod
+    def parse_synonym_set_path(path: str) -> Dict[str,str]:
+        """Parses a synonym_set path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/synonymSets/(?P<context>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        # NOTE(review): unlike `_read_environment_variables`, these values are
+        # not lower-cased here, so the comparisons below are case-sensitive.
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use. An explicit override always wins.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+        """
+        client_cert_source = None
+        if use_cert_flag:
+            if provided_cert_source:
+                client_cert_source = provided_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+        return client_cert_source
+
+    @staticmethod
+    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+        """Return the API endpoint used by the client.
+
+        Args:
+            api_override (str): The API endpoint override. If specified, this is always
+                the return value of this function and the other arguments are not used.
+            client_cert_source (bytes): The client certificate source used by the client.
+            universe_domain (str): The universe domain used by the client.
+            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+                Possible values are "always", "auto", or "never".
+
+        Returns:
+            str: The API endpoint to be used by the client.
+        """
+        if api_override is not None:
+            api_endpoint = api_override
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            _default_universe = SynonymSetServiceClient._DEFAULT_UNIVERSE
+            if universe_domain != _default_universe:
+                raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
+            api_endpoint = SynonymSetServiceClient.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = SynonymSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
+        return api_endpoint
+
+    @staticmethod
+    def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
+        """Return the universe domain used by the client.
+
+        Args:
+            client_universe_domain (Optional[str]): The universe domain configured via the client options.
+            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+        Returns:
+            str: The universe domain to be used by the client.
+
+        Raises:
+            ValueError: If the universe domain is an empty string.
+        """
+        universe_domain = SynonymSetServiceClient._DEFAULT_UNIVERSE
+        if client_universe_domain is not None:
+            universe_domain = client_universe_domain
+        elif universe_domain_env is not None:
+            universe_domain = universe_domain_env
+        if len(universe_domain.strip()) == 0:
+            raise ValueError("Universe Domain cannot be an empty string.")
+        return universe_domain
+
+    @staticmethod
+    def _compare_universes(client_universe: str,
+                           credentials: ga_credentials.Credentials) -> bool:
+        """Returns True iff the universe domains used by the client and credentials match.
+
+        Args:
+            client_universe (str): The universe domain configured via the client options.
+            credentials (ga_credentials.Credentials): The credentials being used in the client.
+
+        Returns:
+            bool: True iff client_universe matches the universe in credentials.
+
+        Raises:
+            ValueError: when client_universe does not match the universe in credentials.
+        """
+
+        default_universe = SynonymSetServiceClient._DEFAULT_UNIVERSE
+        credentials_universe = getattr(credentials, "universe_domain", default_universe)
+
+        if client_universe != credentials_universe:
+            raise ValueError("The configured universe domain "
+                f"({client_universe}) does not match the universe domain "
+                f"found in the credentials ({credentials_universe}). "
+                "If you haven't configured the universe domain explicitly, "
+                f"`{default_universe}` is the default.")
+        return True
+
+    def _validate_universe_domain(self):
+        """Validates client's and credentials' universe domains are consistent.
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+        self._is_universe_domain_valid = (self._is_universe_domain_valid or
+            SynonymSetServiceClient._compare_universes(self.universe_domain, self.transport._credentials))
+        return self._is_universe_domain_valid
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        The value is computed during ``__init__`` (it depends on the chosen
+        transport and client options).
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Resolved during ``__init__`` from client options, the
+        GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, or the default.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        return self._universe_domain
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, SynonymSetServiceTransport, Callable[..., SynonymSetServiceTransport]]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the synonym set service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,SynonymSetServiceTransport,Callable[..., SynonymSetServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the SynonymSetServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = SynonymSetServiceClient._read_environment_variables()
+        self._client_cert_source = SynonymSetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = SynonymSetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, SynonymSetServiceTransport)
+        if transport_provided:
+            # transport is a SynonymSetServiceTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = cast(SynonymSetServiceTransport, transport)
+            self._api_endpoint = self._transport.host
+
+        self._api_endpoint = (self._api_endpoint or
+            SynonymSetServiceClient._get_api_endpoint(
+                self._client_options.api_endpoint,
+                self._client_cert_source,
+                self._universe_domain,
+                self._use_mtls_endpoint))
+
+        if not transport_provided:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
+                credentials = google.auth._default.get_api_key_credentials(api_key_value)
+
+            transport_init: Union[Type[SynonymSetServiceTransport], Callable[..., SynonymSetServiceTransport]] = (
+                SynonymSetServiceClient.get_transport_class(transport)
+                if isinstance(transport, str) or transport is None
+                else cast(Callable[..., SynonymSetServiceTransport], transport)
+            )
+            # initialize with the provided callable or the passed in class
+            self._transport = transport_init(
+                credentials=credentials,
+                credentials_file=self._client_options.credentials_file,
+                host=self._api_endpoint,
+                scopes=self._client_options.scopes,
+                client_cert_source_for_mtls=self._client_cert_source,
+                quota_project_id=self._client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=True,
+                api_audience=self._client_options.api_audience,
+            )
+
+    def create_synonym_set(self,
+            request: Optional[Union[synonymset_service_request.CreateSynonymSetRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            synonym_set: Optional[synonymset.SynonymSet] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> synonymset.SynonymSet:
+        r"""Creates a SynonymSet for a single context. Throws an
+        ALREADY_EXISTS exception if a synonymset already exists for the
+        context.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_create_synonym_set():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.CreateSynonymSetRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = client.create_synonym_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.CreateSynonymSetRequest, dict]):
+                The request object. Request message for
+                SynonymSetService.CreateSynonymSet.
+            parent (str):
+                Required. The parent name. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            synonym_set (google.cloud.contentwarehouse_v1.types.SynonymSet):
+                Required. The synonymSet to be
+                created for a context
+
+                This corresponds to the ``synonym_set`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.SynonymSet:
+                Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, synonym_set])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.CreateSynonymSetRequest):
+            request = synonymset_service_request.CreateSynonymSetRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if synonym_set is not None:
+                request.synonym_set = synonym_set
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_synonym_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_synonym_set(self,
+            request: Optional[Union[synonymset_service_request.GetSynonymSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> synonymset.SynonymSet:
+        r"""Gets a SynonymSet for a particular context. Throws a NOT_FOUND
+        exception if the Synonymset does not exist
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_get_synonym_set():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.GetSynonymSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_synonym_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.GetSynonymSetRequest, dict]):
+                The request object. Request message for
+                SynonymSetService.GetSynonymSet. Will
+                return synonymSet for a certain context.
+            name (str):
+                Required. The name of the synonymSet to retrieve Format:
+                projects/{project_number}/locations/{location}/synonymSets/{context}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.SynonymSet:
+                Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.GetSynonymSetRequest):
+            request = synonymset_service_request.GetSynonymSetRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_synonym_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_synonym_set(self,
+            request: Optional[Union[synonymset_service_request.UpdateSynonymSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            synonym_set: Optional[synonymset.SynonymSet] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> synonymset.SynonymSet:
+        r"""Remove the existing SynonymSet for the context and replaces it
+        with a new one. Throws a NOT_FOUND exception if the SynonymSet
+        is not found.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_update_synonym_set():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.UpdateSynonymSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.update_synonym_set(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.UpdateSynonymSetRequest, dict]):
+                The request object. Request message for
+                SynonymSetService.UpdateSynonymSet.
+                Removes the SynonymSet for the specified
+                context and replaces it with the
+                SynonymSet in this request.
+            name (str):
+                Required. The name of the synonymSet to update Format:
+                projects/{project_number}/locations/{location}/synonymSets/{context}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            synonym_set (google.cloud.contentwarehouse_v1.types.SynonymSet):
+                Required. The synonymSet to be
+                updated for the customer
+
+                This corresponds to the ``synonym_set`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.types.SynonymSet:
+                Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, synonym_set])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.UpdateSynonymSetRequest):
+            request = synonymset_service_request.UpdateSynonymSetRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+            if synonym_set is not None:
+                request.synonym_set = synonym_set
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_synonym_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_synonym_set(self,
+            request: Optional[Union[synonymset_service_request.DeleteSynonymSetRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a SynonymSet for a given context. Throws a NOT_FOUND
+        exception if the SynonymSet is not found.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_delete_synonym_set():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.DeleteSynonymSetRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_synonym_set(request=request)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.DeleteSynonymSetRequest, dict]):
+                The request object. Request message for
+                SynonymSetService.DeleteSynonymSet.
+            name (str):
+                Required. The name of the synonymSet to delete Format:
+                projects/{project_number}/locations/{location}/synonymSets/{context}.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.DeleteSynonymSetRequest):
+            request = synonymset_service_request.DeleteSynonymSetRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_synonym_set]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def list_synonym_sets(self,
+            request: Optional[Union[synonymset_service_request.ListSynonymSetsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListSynonymSetsPager:
+        r"""Returns all SynonymSets (for all contexts) for the
+        specified location.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import contentwarehouse_v1
+
+            def sample_list_synonym_sets():
+                # Create a client
+                client = contentwarehouse_v1.SynonymSetServiceClient()
+
+                # Initialize request argument(s)
+                request = contentwarehouse_v1.ListSynonymSetsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_synonym_sets(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.contentwarehouse_v1.types.ListSynonymSetsRequest, dict]):
+                The request object. Request message for
+                SynonymSetService.ListSynonymSets. Will
+                return all synonymSets belonging to the
+                customer project.
+            parent (str):
+                Required. The parent name. Format:
+                projects/{project_number}/locations/{location}.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.contentwarehouse_v1.services.synonym_set_service.pagers.ListSynonymSetsPager:
+                Response message for
+                SynonymSetService.ListSynonymSets.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, synonymset_service_request.ListSynonymSetsRequest):
+            request = synonymset_service_request.ListSynonymSetsRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_synonym_sets]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListSynonymSetsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "SynonymSetServiceClient":
+        # Context-manager entry: no setup needed; __exit__ closes the
+        # underlying transport.
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+        """
+        # The standard exception triple is ignored; returning None lets any
+        # in-flight exception propagate after the transport is closed.
+        self.transport.close()
+
+    def get_operation(
+        self,
+        request: Optional[operations_pb2.GetOperationRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operations_pb2.Operation:
+        r"""Gets the latest state of a long-running operation.
+
+        Args:
+            request (:class:`~.operations_pb2.GetOperationRequest`):
+                The request object. Request message for
+                `GetOperation` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                    if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.operations_pb2.Operation:
+                An ``Operation`` object.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = operations_pb2.GetOperationRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+
+
+
+
+
+
+
+# Default client info; advertises this package's version in the
+# user-agent string sent with API requests.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+# Explicit public API of this module.
+__all__ = (
+    "SynonymSetServiceClient",
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/pagers.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/pagers.py
new file mode 100644
index 000000000000..06f7c4e32715
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/pagers.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+
+
+class ListSynonymSetsPager:
+    """A pager for iterating through ``list_synonym_sets`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.contentwarehouse_v1.types.ListSynonymSetsResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``synonym_sets`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListSynonymSets`` requests and continue to iterate
+    through the ``synonym_sets`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.contentwarehouse_v1.types.ListSynonymSetsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., synonymset_service_request.ListSynonymSetsResponse],
+            request: synonymset_service_request.ListSynonymSetsRequest,
+            response: synonymset_service_request.ListSynonymSetsResponse,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.contentwarehouse_v1.types.ListSynonymSetsRequest):
+                The initial request object.
+            response (google.cloud.contentwarehouse_v1.types.ListSynonymSetsResponse):
+                The initial response object.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        # Re-wrap the request in its proto-plus type: ``pages`` mutates
+        # ``page_token`` on this copy rather than on the caller's object.
+        self._request = synonymset_service_request.ListSynonymSetsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        # Proxy unknown attribute lookups to the most recent response, so the
+        # pager exposes all ListSynonymSetsResponse attributes directly.
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[synonymset_service_request.ListSynonymSetsResponse]:
+        """Yield the initial response, then lazily fetch and yield each next page."""
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[synonymset.SynonymSet]:
+        # Flatten the pages into individual SynonymSet items.
+        for page in self.pages:
+            yield from page.synonym_sets
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListSynonymSetsAsyncPager:
+    """A pager for iterating through ``list_synonym_sets`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.contentwarehouse_v1.types.ListSynonymSetsResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``synonym_sets`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListSynonymSets`` requests and continue to iterate
+    through the ``synonym_sets`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.contentwarehouse_v1.types.ListSynonymSetsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[synonymset_service_request.ListSynonymSetsResponse]],
+            request: synonymset_service_request.ListSynonymSetsRequest,
+            response: synonymset_service_request.ListSynonymSetsResponse,
+            *,
+            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.contentwarehouse_v1.types.ListSynonymSetsRequest):
+                The initial request object.
+            response (google.cloud.contentwarehouse_v1.types.ListSynonymSetsResponse):
+                The initial response object.
+            retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        # Re-wrap the request in its proto-plus type: ``pages`` mutates
+        # ``page_token`` on this copy rather than on the caller's object.
+        self._request = synonymset_service_request.ListSynonymSetsRequest(request)
+        self._response = response
+        self._retry = retry
+        self._timeout = timeout
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        # Proxy unknown attribute lookups to the most recent response, so the
+        # pager exposes all ListSynonymSetsResponse attributes directly.
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[synonymset_service_request.ListSynonymSetsResponse]:
+        """Yield the initial response, then lazily await and yield each next page."""
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
+            yield self._response
+    def __aiter__(self) -> AsyncIterator[synonymset.SynonymSet]:
+        # Flatten the pages into individual SynonymSet items.
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.synonym_sets:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/README.rst b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/README.rst
new file mode 100644
index 000000000000..ac041fd50093
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`SynonymSetServiceTransport` is the ABC for all transports.
+- public child `SynonymSetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `SynonymSetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseSynonymSetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `SynonymSetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/__init__.py
new file mode 100644
index 000000000000..a94d27859e6f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/__init__.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import SynonymSetServiceTransport
+from .grpc import SynonymSetServiceGrpcTransport
+from .grpc_asyncio import SynonymSetServiceGrpcAsyncIOTransport
+from .rest import SynonymSetServiceRestTransport
+from .rest import SynonymSetServiceRestInterceptor
+
+
+# Compile a registry of transports.
+# Keys are the transport names a client may request ('grpc', 'grpc_asyncio',
+# 'rest'); values are the corresponding transport classes.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[SynonymSetServiceTransport]]
+_transport_registry['grpc'] = SynonymSetServiceGrpcTransport
+_transport_registry['grpc_asyncio'] = SynonymSetServiceGrpcAsyncIOTransport
+_transport_registry['rest'] = SynonymSetServiceRestTransport
+
+__all__ = (
+    'SynonymSetServiceTransport',
+    'SynonymSetServiceGrpcTransport',
+    'SynonymSetServiceGrpcAsyncIOTransport',
+    'SynonymSetServiceRestTransport',
+    'SynonymSetServiceRestInterceptor',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py
new file mode 100644
index 000000000000..6028cac190fc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/base.py
@@ -0,0 +1,245 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.contentwarehouse_v1 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class SynonymSetServiceTransport(abc.ABC):
+    """Abstract transport class for SynonymSetService.
+
+    Concrete transports (gRPC, gRPC asyncio, REST) subclass this and implement
+    the RPC-returning properties declared below.
+    """
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'contentwarehouse.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            api_audience (Optional[str]): Audience applied to credentials that
+                support ``with_gdch_audience``; when unset, ``host`` is used as
+                the audience.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        # Subclasses may set _ignore_credentials before calling
+        # super().__init__() (e.g. when a pre-made channel is supplied);
+        # default to resolving credentials normally.
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply audience if the credentials file passed from user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+    @property
+    def host(self):
+        # The resolved "host:port" endpoint string.
+        return self._host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.create_synonym_set: gapic_v1.method.wrap_method(
+                self.create_synonym_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            # Read-only RPCs (get/list) retry on transient ServiceUnavailable
+            # errors with exponential backoff; mutations do not retry.
+            self.get_synonym_set: gapic_v1.method.wrap_method(
+                self.get_synonym_set,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_synonym_set: gapic_v1.method.wrap_method(
+                self.update_synonym_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_synonym_set: gapic_v1.method.wrap_method(
+                self.delete_synonym_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_synonym_sets: gapic_v1.method.wrap_method(
+                self.list_synonym_sets,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_operation: gapic_v1.method.wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+         }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    @property
+    def create_synonym_set(self) -> Callable[
+            [synonymset_service_request.CreateSynonymSetRequest],
+            Union[
+                synonymset.SynonymSet,
+                Awaitable[synonymset.SynonymSet]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_synonym_set(self) -> Callable[
+            [synonymset_service_request.GetSynonymSetRequest],
+            Union[
+                synonymset.SynonymSet,
+                Awaitable[synonymset.SynonymSet]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def update_synonym_set(self) -> Callable[
+            [synonymset_service_request.UpdateSynonymSetRequest],
+            Union[
+                synonymset.SynonymSet,
+                Awaitable[synonymset.SynonymSet]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def delete_synonym_set(self) -> Callable[
+            [synonymset_service_request.DeleteSynonymSetRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_synonym_sets(self) -> Callable[
+            [synonymset_service_request.ListSynonymSetsRequest],
+            Union[
+                synonymset_service_request.ListSynonymSetsResponse,
+                Awaitable[synonymset_service_request.ListSynonymSetsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[
+        [operations_pb2.GetOperationRequest],
+        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'SynonymSetServiceTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py
new file mode 100644
index 000000000000..4df042180849
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py
@@ -0,0 +1,402 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth                         # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import SynonymSetServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class SynonymSetServiceGrpcTransport(SynonymSetServiceTransport):
+    """gRPC backend transport for SynonymSetService.
+
+    A Service that manage/custom customer specified SynonymSets.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, grpc.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+
+        # Delegate to google-api-core so credential resolution, default
+        # scopes and default host stay consistent with other transports.
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+
+        The channel is created (or accepted from the caller) during
+        ``__init__`` and cached; this accessor never builds a new one.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_synonym_set(self) -> Callable[
+            [synonymset_service_request.CreateSynonymSetRequest],
+            synonymset.SynonymSet]:
+        r"""Return a callable for the create synonym set method over gRPC.
+
+        Creates a SynonymSet for a single context. Throws an
+        ALREADY_EXISTS exception if a synonymset already exists for the
+        context.
+
+        Returns:
+            Callable[[~.CreateSynonymSetRequest],
+                    ~.SynonymSet]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs so repeated property access
+        # reuses the same channel-bound callable.
+        if 'create_synonym_set' not in self._stubs:
+            self._stubs['create_synonym_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/CreateSynonymSet',
+                request_serializer=synonymset_service_request.CreateSynonymSetRequest.serialize,
+                response_deserializer=synonymset.SynonymSet.deserialize,
+            )
+        return self._stubs['create_synonym_set']
+
+    @property
+    def get_synonym_set(self) -> Callable[
+            [synonymset_service_request.GetSynonymSetRequest],
+            synonymset.SynonymSet]:
+        r"""Return a callable for the get synonym set method over gRPC.
+
+        Gets a SynonymSet for a particular context. Throws a NOT_FOUND
+        exception if the Synonymset does not exist
+
+        Returns:
+            Callable[[~.GetSynonymSetRequest],
+                    ~.SynonymSet]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs so repeated property access
+        # reuses the same channel-bound callable.
+        if 'get_synonym_set' not in self._stubs:
+            self._stubs['get_synonym_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/GetSynonymSet',
+                request_serializer=synonymset_service_request.GetSynonymSetRequest.serialize,
+                response_deserializer=synonymset.SynonymSet.deserialize,
+            )
+        return self._stubs['get_synonym_set']
+
+    @property
+    def update_synonym_set(self) -> Callable[
+            [synonymset_service_request.UpdateSynonymSetRequest],
+            synonymset.SynonymSet]:
+        r"""Return a callable for the update synonym set method over gRPC.
+
+        Remove the existing SynonymSet for the context and replaces it
+        with a new one. Throws a NOT_FOUND exception if the SynonymSet
+        is not found.
+
+        Returns:
+            Callable[[~.UpdateSynonymSetRequest],
+                    ~.SynonymSet]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs so repeated property access
+        # reuses the same channel-bound callable.
+        if 'update_synonym_set' not in self._stubs:
+            self._stubs['update_synonym_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/UpdateSynonymSet',
+                request_serializer=synonymset_service_request.UpdateSynonymSetRequest.serialize,
+                response_deserializer=synonymset.SynonymSet.deserialize,
+            )
+        return self._stubs['update_synonym_set']
+
+    @property
+    def delete_synonym_set(self) -> Callable[
+            [synonymset_service_request.DeleteSynonymSetRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete synonym set method over gRPC.
+
+        Deletes a SynonymSet for a given context. Throws a NOT_FOUND
+        exception if the SynonymSet is not found.
+
+        Returns:
+            Callable[[~.DeleteSynonymSetRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs so repeated property access
+        # reuses the same channel-bound callable.
+        if 'delete_synonym_set' not in self._stubs:
+            self._stubs['delete_synonym_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/DeleteSynonymSet',
+                request_serializer=synonymset_service_request.DeleteSynonymSetRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_synonym_set']
+
+    @property
+    def list_synonym_sets(self) -> Callable[
+            [synonymset_service_request.ListSynonymSetsRequest],
+            synonymset_service_request.ListSynonymSetsResponse]:
+        r"""Return a callable for the list synonym sets method over gRPC.
+
+        Returns all SynonymSets (for all contexts) for the
+        specified location.
+
+        Returns:
+            Callable[[~.ListSynonymSetsRequest],
+                    ~.ListSynonymSetsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs so repeated property access
+        # reuses the same channel-bound callable.
+        if 'list_synonym_sets' not in self._stubs:
+            self._stubs['list_synonym_sets'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/ListSynonymSets',
+                request_serializer=synonymset_service_request.ListSynonymSetsRequest.serialize,
+                response_deserializer=synonymset_service_request.ListSynonymSetsResponse.deserialize,
+            )
+        return self._stubs['list_synonym_sets']
+
+    def close(self):
+        """Close the underlying gRPC channel, releasing its resources."""
+        self.grpc_channel.close()
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # Cached in self._stubs like the service RPC stubs above.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def kind(self) -> str:
+        """Identify this transport implementation ("grpc")."""
+        return "grpc"
+
+
+# Explicit public surface of this module.
+__all__ = (
+    'SynonymSetServiceGrpcTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..328fe2c52cbe
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc_asyncio.py
@@ -0,0 +1,466 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc                        # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import SynonymSetServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import SynonymSetServiceGrpcTransport
+
+
+class SynonymSetServiceGrpcAsyncIOTransport(SynonymSetServiceTransport):
+    """gRPC AsyncIO backend transport for SynonymSetService.
+
+    A service that manages customer-specified SynonymSets.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'contentwarehouse.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+              creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        # ``wrap_method`` only accepts ``kind`` in newer google-api-core
+        # releases; detect support once here so _wrap_method can branch cheaply.
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Return the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def create_synonym_set(self) -> Callable[
+            [synonymset_service_request.CreateSynonymSetRequest],
+            Awaitable[synonymset.SynonymSet]]:
+        r"""Return a callable for the create synonym set method over gRPC.
+
+        Creates a SynonymSet for a single context. Throws an
+        ALREADY_EXISTS exception if a synonymset already exists for the
+        context.
+
+        Returns:
+            Callable[[~.CreateSynonymSetRequest],
+                    Awaitable[~.SynonymSet]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_synonym_set' not in self._stubs:
+            self._stubs['create_synonym_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/CreateSynonymSet',
+                request_serializer=synonymset_service_request.CreateSynonymSetRequest.serialize,
+                response_deserializer=synonymset.SynonymSet.deserialize,
+            )
+        return self._stubs['create_synonym_set']
+
+    @property
+    def get_synonym_set(self) -> Callable[
+            [synonymset_service_request.GetSynonymSetRequest],
+            Awaitable[synonymset.SynonymSet]]:
+        r"""Return a callable for the get synonym set method over gRPC.
+
+        Gets a SynonymSet for a particular context. Throws a NOT_FOUND
+        exception if the Synonymset does not exist
+
+        Returns:
+            Callable[[~.GetSynonymSetRequest],
+                    Awaitable[~.SynonymSet]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_synonym_set' not in self._stubs:
+            self._stubs['get_synonym_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/GetSynonymSet',
+                request_serializer=synonymset_service_request.GetSynonymSetRequest.serialize,
+                response_deserializer=synonymset.SynonymSet.deserialize,
+            )
+        return self._stubs['get_synonym_set']
+
+    @property
+    def update_synonym_set(self) -> Callable[
+            [synonymset_service_request.UpdateSynonymSetRequest],
+            Awaitable[synonymset.SynonymSet]]:
+        r"""Return a callable for the update synonym set method over gRPC.
+
+        Remove the existing SynonymSet for the context and replaces it
+        with a new one. Throws a NOT_FOUND exception if the SynonymSet
+        is not found.
+
+        Returns:
+            Callable[[~.UpdateSynonymSetRequest],
+                    Awaitable[~.SynonymSet]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_synonym_set' not in self._stubs:
+            self._stubs['update_synonym_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/UpdateSynonymSet',
+                request_serializer=synonymset_service_request.UpdateSynonymSetRequest.serialize,
+                response_deserializer=synonymset.SynonymSet.deserialize,
+            )
+        return self._stubs['update_synonym_set']
+
+    @property
+    def delete_synonym_set(self) -> Callable[
+            [synonymset_service_request.DeleteSynonymSetRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete synonym set method over gRPC.
+
+        Deletes a SynonymSet for a given context. Throws a NOT_FOUND
+        exception if the SynonymSet is not found.
+
+        Returns:
+            Callable[[~.DeleteSynonymSetRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_synonym_set' not in self._stubs:
+            self._stubs['delete_synonym_set'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/DeleteSynonymSet',
+                request_serializer=synonymset_service_request.DeleteSynonymSetRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_synonym_set']
+
+    @property
+    def list_synonym_sets(self) -> Callable[
+            [synonymset_service_request.ListSynonymSetsRequest],
+            Awaitable[synonymset_service_request.ListSynonymSetsResponse]]:
+        r"""Return a callable for the list synonym sets method over gRPC.
+
+        Returns all SynonymSets (for all contexts) for the
+        specified location.
+
+        Returns:
+            Callable[[~.ListSynonymSetsRequest],
+                    Awaitable[~.ListSynonymSetsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_synonym_sets' not in self._stubs:
+            self._stubs['list_synonym_sets'] = self.grpc_channel.unary_unary(
+                '/google.cloud.contentwarehouse.v1.SynonymSetService/ListSynonymSets',
+                request_serializer=synonymset_service_request.ListSynonymSetsRequest.serialize,
+                response_deserializer=synonymset_service_request.ListSynonymSetsResponse.deserialize,
+            )
+        return self._stubs['list_synonym_sets']
+
+    def _prep_wrapped_messages(self, client_info):
+        """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+        self._wrapped_methods = {
+            self.create_synonym_set: self._wrap_method(
+                self.create_synonym_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_synonym_set: self._wrap_method(
+                self.get_synonym_set,
+                default_retry=retries.AsyncRetry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_synonym_set: self._wrap_method(
+                self.update_synonym_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_synonym_set: self._wrap_method(
+                self.delete_synonym_set,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_synonym_sets: self._wrap_method(
+                self.list_synonym_sets,
+                default_retry=retries.AsyncRetry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_operation: self._wrap_method(
+                self.get_operation,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def _wrap_method(self, func, *args, **kwargs):
+        """Wrap ``func`` with the async gapic wrapper, passing ``kind`` only when supported."""
+        if self._wrap_with_kind:  # pragma: NO COVER
+            kwargs["kind"] = self.kind
+        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
+    def close(self):
+        """Close the underlying gRPC channel, returning the result of its ``close()``."""
+        return self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        """Identify this transport implementation ("grpc_asyncio")."""
+        return "grpc_asyncio"
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+
+# Explicit public surface of this module.
+__all__ = (
+    'SynonymSetServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py
new file mode 100644
index 000000000000..5c2e70f2ef2d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py
@@ -0,0 +1,806 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+import json  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+
+from requests import __version__ as requests_version
+import dataclasses
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+from .rest_base import _BaseSynonymSetServiceRestTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+
+# Client info sent with API requests (user-agent); the REST transport reports
+# the `requests` library version in place of a gRPC version.
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=f"requests@{requests_version}",
+)
+
+
+class SynonymSetServiceRestInterceptor:
+    """Interceptor for SynonymSetService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the SynonymSetServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomSynonymSetServiceInterceptor(SynonymSetServiceRestInterceptor):
+            def pre_create_synonym_set(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_synonym_set(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_delete_synonym_set(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_synonym_set(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_synonym_set(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_list_synonym_sets(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_synonym_sets(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_update_synonym_set(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_synonym_set(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = SynonymSetServiceRestTransport(interceptor=MyCustomSynonymSetServiceInterceptor())
+        client = SynonymSetServiceClient(transport=transport)
+
+
+    """
+    # Every pre_* hook receives (request, metadata) and must return the pair
+    # (possibly modified); every post_* hook receives and returns the response.
+    # The defaults below are identity pass-throughs, so subclasses override
+    # only the hooks they care about. delete_synonym_set has no post_* hook
+    # because its RPC produces no response payload to intercept.
+    def pre_create_synonym_set(self, request: synonymset_service_request.CreateSynonymSetRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[synonymset_service_request.CreateSynonymSetRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_synonym_set
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SynonymSetService server.
+        """
+        return request, metadata
+
+    def post_create_synonym_set(self, response: synonymset.SynonymSet) -> synonymset.SynonymSet:
+        """Post-rpc interceptor for create_synonym_set
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SynonymSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_synonym_set(self, request: synonymset_service_request.DeleteSynonymSetRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[synonymset_service_request.DeleteSynonymSetRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_synonym_set
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SynonymSetService server.
+        """
+        return request, metadata
+
+    def pre_get_synonym_set(self, request: synonymset_service_request.GetSynonymSetRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[synonymset_service_request.GetSynonymSetRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_synonym_set
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SynonymSetService server.
+        """
+        return request, metadata
+
+    def post_get_synonym_set(self, response: synonymset.SynonymSet) -> synonymset.SynonymSet:
+        """Post-rpc interceptor for get_synonym_set
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SynonymSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_list_synonym_sets(self, request: synonymset_service_request.ListSynonymSetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[synonymset_service_request.ListSynonymSetsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_synonym_sets
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SynonymSetService server.
+        """
+        return request, metadata
+
+    def post_list_synonym_sets(self, response: synonymset_service_request.ListSynonymSetsResponse) -> synonymset_service_request.ListSynonymSetsResponse:
+        """Post-rpc interceptor for list_synonym_sets
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SynonymSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_update_synonym_set(self, request: synonymset_service_request.UpdateSynonymSetRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[synonymset_service_request.UpdateSynonymSetRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_synonym_set
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SynonymSetService server.
+        """
+        return request, metadata
+
+    def post_update_synonym_set(self, response: synonymset.SynonymSet) -> synonymset.SynonymSet:
+        """Post-rpc interceptor for update_synonym_set
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SynonymSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_get_operation(
+        self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SynonymSetService server.
+        """
+        return request, metadata
+
+    def post_get_operation(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for get_operation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SynonymSetService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class SynonymSetServiceRestStub:
+    # Shared plumbing injected into each per-RPC callable class below:
+    # the authorized HTTP session, the API host, and the active interceptor.
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: SynonymSetServiceRestInterceptor
+
+
+class SynonymSetServiceRestTransport(_BaseSynonymSetServiceRestTransport):
+    """REST backend synchronous transport for SynonymSetService.
+
+    A Service that manage/custom customer specified SynonymSets.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'contentwarehouse.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[SynonymSetServiceRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        # The session attaches credentials to every outgoing HTTP request.
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        # Mutual TLS is configured only when a client-cert callback is given.
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        # Fall back to the no-op default interceptor when none is supplied.
+        self._interceptor = interceptor or SynonymSetServiceRestInterceptor()
+        # NOTE(review): _prep_wrapped_messages comes from the base transport —
+        # presumably applies client_info/retry settings per RPC; confirm.
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateSynonymSet(_BaseSynonymSetServiceRestTransport._BaseCreateSynonymSet, SynonymSetServiceRestStub):
+        def __hash__(self):
+            return hash("SynonymSetServiceRestTransport.CreateSynonymSet")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Issue the HTTP request via the authorized session using the
+            # method/URI produced by transcoding; payloads are JSON.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: synonymset_service_request.CreateSynonymSetRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> synonymset.SynonymSet:
+            r"""Call the create synonym set method over HTTP.
+
+            Args:
+                request (~.synonymset_service_request.CreateSynonymSetRequest):
+                    The request object. Request message for
+                SynonymSetService.CreateSynonymSet.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.synonymset.SynonymSet:
+                    Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+            """
+
+            # NOTE(review): `retry` is accepted but not used in this body —
+            # presumably applied by the outer gapic method wrapper; confirm.
+            http_options = _BaseSynonymSetServiceRestTransport._BaseCreateSynonymSet._get_http_options()
+            # Let a user-supplied interceptor rewrite request/metadata first.
+            request, metadata = self._interceptor.pre_create_synonym_set(request, metadata)
+            transcoded_request = _BaseSynonymSetServiceRestTransport._BaseCreateSynonymSet._get_transcoded_request(http_options, request)
+
+            body = _BaseSynonymSetServiceRestTransport._BaseCreateSynonymSet._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseSynonymSetServiceRestTransport._BaseCreateSynonymSet._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = SynonymSetServiceRestTransport._CreateSynonymSet._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = synonymset.SynonymSet()
+            pb_resp = synonymset.SynonymSet.pb(resp)
+
+            # Parsing into pb_resp populates resp (pb_resp wraps resp's
+            # underlying protobuf message).
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_create_synonym_set(resp)
+            return resp
+
+    class _DeleteSynonymSet(_BaseSynonymSetServiceRestTransport._BaseDeleteSynonymSet, SynonymSetServiceRestStub):
+        def __hash__(self):
+            return hash("SynonymSetServiceRestTransport.DeleteSynonymSet")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # `body` is unused: this RPC sends no request payload.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: synonymset_service_request.DeleteSynonymSetRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ):
+            r"""Call the delete synonym set method over HTTP.
+
+            Args:
+                request (~.synonymset_service_request.DeleteSynonymSetRequest):
+                    The request object. Request message for
+                SynonymSetService.DeleteSynonymSet.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+            """
+
+            http_options = _BaseSynonymSetServiceRestTransport._BaseDeleteSynonymSet._get_http_options()
+            # Let a user-supplied interceptor rewrite request/metadata first.
+            request, metadata = self._interceptor.pre_delete_synonym_set(request, metadata)
+            transcoded_request = _BaseSynonymSetServiceRestTransport._BaseDeleteSynonymSet._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseSynonymSetServiceRestTransport._BaseDeleteSynonymSet._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = SynonymSetServiceRestTransport._DeleteSynonymSet._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+            # No return value: the RPC response is google.protobuf.Empty, so
+            # there is no payload to deserialize.
+
+    class _GetSynonymSet(_BaseSynonymSetServiceRestTransport._BaseGetSynonymSet, SynonymSetServiceRestStub):
+        def __hash__(self):
+            return hash("SynonymSetServiceRestTransport.GetSynonymSet")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # `body` is unused: this RPC sends no request payload.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: synonymset_service_request.GetSynonymSetRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> synonymset.SynonymSet:
+            r"""Call the get synonym set method over HTTP.
+
+            Args:
+                request (~.synonymset_service_request.GetSynonymSetRequest):
+                    The request object. Request message for
+                SynonymSetService.GetSynonymSet. Will
+                return synonymSet for a certain context.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.synonymset.SynonymSet:
+                    Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+            """
+
+            http_options = _BaseSynonymSetServiceRestTransport._BaseGetSynonymSet._get_http_options()
+            # Let a user-supplied interceptor rewrite request/metadata first.
+            request, metadata = self._interceptor.pre_get_synonym_set(request, metadata)
+            transcoded_request = _BaseSynonymSetServiceRestTransport._BaseGetSynonymSet._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseSynonymSetServiceRestTransport._BaseGetSynonymSet._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = SynonymSetServiceRestTransport._GetSynonymSet._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = synonymset.SynonymSet()
+            pb_resp = synonymset.SynonymSet.pb(resp)
+
+            # Parsing into pb_resp populates resp (pb_resp wraps resp's
+            # underlying protobuf message).
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_get_synonym_set(resp)
+            return resp
+
+    class _ListSynonymSets(_BaseSynonymSetServiceRestTransport._BaseListSynonymSets, SynonymSetServiceRestStub):
+        def __hash__(self):
+            return hash("SynonymSetServiceRestTransport.ListSynonymSets")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # `body` is unused: this RPC sends no request payload.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: synonymset_service_request.ListSynonymSetsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> synonymset_service_request.ListSynonymSetsResponse:
+            r"""Call the list synonym sets method over HTTP.
+
+            Args:
+                request (~.synonymset_service_request.ListSynonymSetsRequest):
+                    The request object. Request message for
+                SynonymSetService.ListSynonymSets. Will
+                return all synonymSets belonging to the
+                customer project.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.synonymset_service_request.ListSynonymSetsResponse:
+                    Response message for
+                SynonymSetService.ListSynonymSets.
+
+            """
+
+            http_options = _BaseSynonymSetServiceRestTransport._BaseListSynonymSets._get_http_options()
+            # Let a user-supplied interceptor rewrite request/metadata first.
+            request, metadata = self._interceptor.pre_list_synonym_sets(request, metadata)
+            transcoded_request = _BaseSynonymSetServiceRestTransport._BaseListSynonymSets._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseSynonymSetServiceRestTransport._BaseListSynonymSets._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = SynonymSetServiceRestTransport._ListSynonymSets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = synonymset_service_request.ListSynonymSetsResponse()
+            pb_resp = synonymset_service_request.ListSynonymSetsResponse.pb(resp)
+
+            # Parsing into pb_resp populates resp (pb_resp wraps resp's
+            # underlying protobuf message).
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_list_synonym_sets(resp)
+            return resp
+
+    class _UpdateSynonymSet(_BaseSynonymSetServiceRestTransport._BaseUpdateSynonymSet, SynonymSetServiceRestStub):
+        def __hash__(self):
+            return hash("SynonymSetServiceRestTransport.UpdateSynonymSet")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            # Issue the HTTP request via the authorized session using the
+            # method/URI produced by transcoding; payloads are JSON.
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: synonymset_service_request.UpdateSynonymSetRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> synonymset.SynonymSet:
+            r"""Call the update synonym set method over HTTP.
+
+            Args:
+                request (~.synonymset_service_request.UpdateSynonymSetRequest):
+                    The request object. Request message for
+                SynonymSetService.UpdateSynonymSet.
+                Removes the SynonymSet for the specified
+                context and replaces it with the
+                SynonymSet in this request.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.synonymset.SynonymSet:
+                    Represents a list of synonyms for a
+                given context. For example a context
+                "sales" could contain:
+
+                Synonym 1: sale, invoice, bill, order
+                Synonym 2: money, credit, finance,
+                payment Synonym 3: shipping, freight,
+                transport
+                Each SynonymSets should be disjoint
+
+            """
+
+            http_options = _BaseSynonymSetServiceRestTransport._BaseUpdateSynonymSet._get_http_options()
+            # Let a user-supplied interceptor rewrite request/metadata first.
+            request, metadata = self._interceptor.pre_update_synonym_set(request, metadata)
+            transcoded_request = _BaseSynonymSetServiceRestTransport._BaseUpdateSynonymSet._get_transcoded_request(http_options, request)
+
+            body = _BaseSynonymSetServiceRestTransport._BaseUpdateSynonymSet._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseSynonymSetServiceRestTransport._BaseUpdateSynonymSet._get_query_params_json(transcoded_request)
+
+            # Send the request
+            response = SynonymSetServiceRestTransport._UpdateSynonymSet._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = synonymset.SynonymSet()
+            pb_resp = synonymset.SynonymSet.pb(resp)
+
+            # Parsing into pb_resp populates resp (pb_resp wraps resp's
+            # underlying protobuf message).
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_update_synonym_set(resp)
+            return resp
+
+    @property
+    def create_synonym_set(self) -> Callable[
+            [synonymset_service_request.CreateSynonymSetRequest],
+            synonymset.SynonymSet]:
+        """Return the callable stub for the CreateSynonymSet RPC over REST."""
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._CreateSynonymSet(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def delete_synonym_set(self) -> Callable[
+            [synonymset_service_request.DeleteSynonymSetRequest],
+            empty_pb2.Empty]:
+        """Return the callable stub for the DeleteSynonymSet RPC over REST."""
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._DeleteSynonymSet(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_synonym_set(self) -> Callable[
+            [synonymset_service_request.GetSynonymSetRequest],
+            synonymset.SynonymSet]:
+        """Return the callable stub for the GetSynonymSet RPC over REST."""
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._GetSynonymSet(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def list_synonym_sets(self) -> Callable[
+            [synonymset_service_request.ListSynonymSetsRequest],
+            synonymset_service_request.ListSynonymSetsResponse]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._ListSynonymSets(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def update_synonym_set(self) -> Callable[
+            [synonymset_service_request.UpdateSynonymSetRequest],
+            synonymset.SynonymSet]:
+        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
+        # In C++ this would require a dynamic_cast
+        return self._UpdateSynonymSet(self._session, self._host, self._interceptor) # type: ignore
+
+    @property
+    def get_operation(self):
+        return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
+
    class _GetOperation(_BaseSynonymSetServiceRestTransport._BaseGetOperation, SynonymSetServiceRestStub):
        """REST stub for the GetOperation mixin RPC (operations_pb2)."""

        def __hash__(self):
            return hash("SynonymSetServiceRestTransport.GetOperation")

        @staticmethod
        def _get_response(
            host,
            metadata,
            query_params,
            session,
            timeout,
            transcoded_request,
            body=None):
            # Issue the HTTP request described by ``transcoded_request``.
            # ``body`` is accepted for signature parity with other stubs but
            # is unused here: GetOperation carries no request body.

            uri = transcoded_request['uri']
            method = transcoded_request['method']
            headers = dict(metadata)
            headers['Content-Type'] = 'application/json'
            # Dispatch on the transcoded HTTP verb (e.g. session.get).
            response = getattr(session, method)(
                "{host}{uri}".format(host=host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                )
            return response

        def __call__(self,
            request: operations_pb2.GetOperationRequest, *,
            retry: OptionalRetry=gapic_v1.method.DEFAULT,
            timeout: Optional[float]=None,
            metadata: Sequence[Tuple[str, str]]=(),
            ) -> operations_pb2.Operation:

            r"""Call the get operation method over HTTP.

            Args:
                request (operations_pb2.GetOperationRequest):
                    The request object for GetOperation method.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                operations_pb2.Operation: Response from GetOperation method.
            """
            # NOTE(review): ``retry`` is accepted but never consulted in this
            # body — retries appear to be handled elsewhere; confirm upstream.

            http_options = _BaseSynonymSetServiceRestTransport._BaseGetOperation._get_http_options()
            # The interceptor may rewrite the request/metadata before transcoding.
            request, metadata = self._interceptor.pre_get_operation(request, metadata)
            transcoded_request = _BaseSynonymSetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request)

            # Jsonify the query params
            query_params = _BaseSynonymSetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)

            # Send the request
            response = SynonymSetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Parse the JSON payload into an Operation proto, then give the
            # interceptor a chance to post-process it.
            content = response.content.decode("utf-8")
            resp = operations_pb2.Operation()
            resp = json_format.Parse(content, resp)
            resp = self._interceptor.post_get_operation(resp)
            return resp
+
    @property
    def kind(self) -> str:
        """Identifier for this transport flavor; always ``"rest"``."""
        return "rest"
+
    def close(self):
        # Release the underlying HTTP session and its connection pool.
        self._session.close()
+
+
# Public surface of this generated transport module.
__all__=(
    'SynonymSetServiceRestTransport',
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest_base.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest_base.py
new file mode 100644
index 000000000000..63c63a60a59c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest_base.py
@@ -0,0 +1,324 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json  # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import SynonymSetServiceTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
class _BaseSynonymSetServiceRestTransport(SynonymSetServiceTransport):
    """Base REST backend transport for SynonymSetService.

    Note: This class is not meant to be used directly. Use its sync and
    async sub-classes instead.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends JSON representations of protocol buffers over HTTP/1.1
    """

    def __init__(self, *,
            host: str = 'contentwarehouse.googleapis.com',
            credentials: Optional[Any] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            url_scheme: str = 'https',
            api_audience: Optional[str] = None,
            ) -> None:
        """Instantiate the transport.
        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'contentwarehouse.googleapis.com').
            credentials (Optional[Any]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you are developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            url_scheme: the protocol scheme for the API endpoint.  Normally
                "https", but for testing or local servers,
                "http" can be specified.
        """
        # Normalize the host: accept either a bare hostname or a full URL.
        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        if maybe_url_match is None:
            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER

        url_match_items = maybe_url_match.groupdict()

        # Prepend ``url_scheme`` only when the caller did not supply one.
        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host

        super().__init__(
            host=host,
            credentials=credentials,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience
        )

    class _BaseCreateSynonymSet:
        """HTTP rule, transcoding and query-param helpers for CreateSynonymSet."""

        def __hash__(self):  # pragma: NO COVER
            # Fix: the exception was previously *returned*, not raised, so
            # hash() would fail with an unrelated TypeError instead.
            raise NotImplementedError("__hash__ must be implemented.")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Defaults for required query params the caller left unset.
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        @staticmethod
        def _get_http_options():
            http_options: List[Dict[str, str]] = [{
                'method': 'post',
                'uri': '/v1/{parent=projects/*/locations/*}/synonymSets',
                'body': 'synonym_set',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = synonymset_service_request.CreateSynonymSetRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_request_body_json(transcoded_request):
            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            return body

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(_BaseSynonymSetServiceRestTransport._BaseCreateSynonymSet._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseDeleteSynonymSet:
        """HTTP rule, transcoding and query-param helpers for DeleteSynonymSet."""

        def __hash__(self):  # pragma: NO COVER
            # Raise (not return) so misuse fails loudly.
            raise NotImplementedError("__hash__ must be implemented.")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        @staticmethod
        def _get_http_options():
            http_options: List[Dict[str, str]] = [{
                'method': 'delete',
                'uri': '/v1/{name=projects/*/locations/*/synonymSets/*}',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = synonymset_service_request.DeleteSynonymSetRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(_BaseSynonymSetServiceRestTransport._BaseDeleteSynonymSet._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseGetSynonymSet:
        """HTTP rule, transcoding and query-param helpers for GetSynonymSet."""

        def __hash__(self):  # pragma: NO COVER
            # Raise (not return) so misuse fails loudly.
            raise NotImplementedError("__hash__ must be implemented.")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        @staticmethod
        def _get_http_options():
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=projects/*/locations/*/synonymSets/*}',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = synonymset_service_request.GetSynonymSetRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(_BaseSynonymSetServiceRestTransport._BaseGetSynonymSet._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseListSynonymSets:
        """HTTP rule, transcoding and query-param helpers for ListSynonymSets."""

        def __hash__(self):  # pragma: NO COVER
            # Raise (not return) so misuse fails loudly.
            raise NotImplementedError("__hash__ must be implemented.")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        @staticmethod
        def _get_http_options():
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{parent=projects/*/locations/*}/synonymSets',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = synonymset_service_request.ListSynonymSetsRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(_BaseSynonymSetServiceRestTransport._BaseListSynonymSets._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseUpdateSynonymSet:
        """HTTP rule, transcoding and query-param helpers for UpdateSynonymSet."""

        def __hash__(self):  # pragma: NO COVER
            # Raise (not return) so misuse fails loudly.
            raise NotImplementedError("__hash__ must be implemented.")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}

        @staticmethod
        def _get_http_options():
            http_options: List[Dict[str, str]] = [{
                'method': 'patch',
                'uri': '/v1/{name=projects/*/locations/*/synonymSets/*}',
                'body': 'synonym_set',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            pb_request = synonymset_service_request.UpdateSynonymSetRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)
            return transcoded_request

        @staticmethod
        def _get_request_body_json(transcoded_request):
            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request['body'],
                use_integers_for_enums=True
            )
            return body

        @staticmethod
        def _get_query_params_json(transcoded_request):
            query_params = json.loads(json_format.MessageToJson(
                transcoded_request['query_params'],
                use_integers_for_enums=True,
            ))
            query_params.update(_BaseSynonymSetServiceRestTransport._BaseUpdateSynonymSet._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"
            return query_params

    class _BaseGetOperation:
        """HTTP rule and transcoding helpers for the GetOperation mixin."""

        def __hash__(self):  # pragma: NO COVER
            # Raise (not return) so misuse fails loudly.
            raise NotImplementedError("__hash__ must be implemented.")

        @staticmethod
        def _get_http_options():
            http_options: List[Dict[str, str]] = [{
                'method': 'get',
                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
            },
            ]
            return http_options

        @staticmethod
        def _get_transcoded_request(http_options, request):
            # Operations requests are plain protobuf messages (not proto-plus),
            # so they are transcoded from a dict rather than a pb wrapper.
            request_kwargs = json_format.MessageToDict(request)
            transcoded_request = path_template.transcode(
                http_options, **request_kwargs)
            return transcoded_request

        @staticmethod
        def _get_query_params_json(transcoded_request):
            # Round-trip through JSON to normalize the query-param mapping.
            query_params = json.loads(json.dumps(transcoded_request['query_params']))
            return query_params
+
+
# Public surface of this generated base-transport module.
__all__=(
    '_BaseSynonymSetServiceRestTransport',
)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/__init__.py
new file mode 100644
index 000000000000..c1d67233cb95
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/__init__.py
@@ -0,0 +1,274 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .async_document_service_request import (
+    CreateDocumentMetadata,
+    UpdateDocumentMetadata,
+)
+from .common import (
+    MergeFieldsOptions,
+    RequestMetadata,
+    ResponseMetadata,
+    UpdateOptions,
+    UserInfo,
+    AccessControlMode,
+    DatabaseType,
+    DocumentCreatorDefaultRole,
+    UpdateType,
+)
+from .document import (
+    DateTimeArray,
+    Document,
+    DocumentReference,
+    EnumArray,
+    EnumValue,
+    FloatArray,
+    IntegerArray,
+    MapProperty,
+    Property,
+    PropertyArray,
+    TextArray,
+    TimestampArray,
+    TimestampValue,
+    Value,
+    ContentCategory,
+    RawDocumentFileType,
+)
+from .document_link_service import (
+    CreateDocumentLinkRequest,
+    DeleteDocumentLinkRequest,
+    DocumentLink,
+    ListLinkedSourcesRequest,
+    ListLinkedSourcesResponse,
+    ListLinkedTargetsRequest,
+    ListLinkedTargetsResponse,
+)
+from .document_schema import (
+    DateTimeTypeOptions,
+    DocumentSchema,
+    EnumTypeOptions,
+    FloatTypeOptions,
+    IntegerTypeOptions,
+    MapTypeOptions,
+    PropertyDefinition,
+    PropertyTypeOptions,
+    TextTypeOptions,
+    TimestampTypeOptions,
+)
+from .document_schema_service import (
+    CreateDocumentSchemaRequest,
+    DeleteDocumentSchemaRequest,
+    GetDocumentSchemaRequest,
+    ListDocumentSchemasRequest,
+    ListDocumentSchemasResponse,
+    UpdateDocumentSchemaRequest,
+)
+from .document_service import (
+    CreateDocumentResponse,
+    FetchAclResponse,
+    QAResult,
+    SearchDocumentsResponse,
+    SetAclResponse,
+    UpdateDocumentResponse,
+)
+from .document_service_request import (
+    CloudAIDocumentOption,
+    CreateDocumentRequest,
+    DeleteDocumentRequest,
+    FetchAclRequest,
+    GetDocumentRequest,
+    LockDocumentRequest,
+    SearchDocumentsRequest,
+    SetAclRequest,
+    UpdateDocumentRequest,
+)
+from .filters import (
+    CustomWeightsMetadata,
+    DocumentQuery,
+    FileTypeFilter,
+    PropertyFilter,
+    TimeFilter,
+    WeightedSchemaProperty,
+)
+from .histogram import (
+    HistogramQuery,
+    HistogramQueryPropertyNameFilter,
+    HistogramQueryResult,
+)
+from .pipeline_service import (
+    RunPipelineRequest,
+)
+from .pipelines import (
+    ExportToCdwPipeline,
+    GcsIngestPipeline,
+    GcsIngestWithDocAiProcessorsPipeline,
+    IngestPipelineConfig,
+    ProcessorInfo,
+    ProcessWithDocAiPipeline,
+    RunPipelineMetadata,
+    RunPipelineResponse,
+)
+from .rule_engine import (
+    AccessControlAction,
+    Action,
+    ActionExecutorOutput,
+    ActionOutput,
+    AddToFolderAction,
+    DataUpdateAction,
+    DataValidationAction,
+    DeleteDocumentAction,
+    InvalidRule,
+    PublishAction,
+    RemoveFromFolderAction,
+    Rule,
+    RuleActionsPair,
+    RuleEngineOutput,
+    RuleEvaluatorOutput,
+    RuleSet,
+)
+from .ruleset_service_request import (
+    CreateRuleSetRequest,
+    DeleteRuleSetRequest,
+    GetRuleSetRequest,
+    ListRuleSetsRequest,
+    ListRuleSetsResponse,
+    UpdateRuleSetRequest,
+)
+from .synonymset import (
+    SynonymSet,
+)
+from .synonymset_service_request import (
+    CreateSynonymSetRequest,
+    DeleteSynonymSetRequest,
+    GetSynonymSetRequest,
+    ListSynonymSetsRequest,
+    ListSynonymSetsResponse,
+    UpdateSynonymSetRequest,
+)
+
+__all__ = (
+    'CreateDocumentMetadata',
+    'UpdateDocumentMetadata',
+    'MergeFieldsOptions',
+    'RequestMetadata',
+    'ResponseMetadata',
+    'UpdateOptions',
+    'UserInfo',
+    'AccessControlMode',
+    'DatabaseType',
+    'DocumentCreatorDefaultRole',
+    'UpdateType',
+    'DateTimeArray',
+    'Document',
+    'DocumentReference',
+    'EnumArray',
+    'EnumValue',
+    'FloatArray',
+    'IntegerArray',
+    'MapProperty',
+    'Property',
+    'PropertyArray',
+    'TextArray',
+    'TimestampArray',
+    'TimestampValue',
+    'Value',
+    'ContentCategory',
+    'RawDocumentFileType',
+    'CreateDocumentLinkRequest',
+    'DeleteDocumentLinkRequest',
+    'DocumentLink',
+    'ListLinkedSourcesRequest',
+    'ListLinkedSourcesResponse',
+    'ListLinkedTargetsRequest',
+    'ListLinkedTargetsResponse',
+    'DateTimeTypeOptions',
+    'DocumentSchema',
+    'EnumTypeOptions',
+    'FloatTypeOptions',
+    'IntegerTypeOptions',
+    'MapTypeOptions',
+    'PropertyDefinition',
+    'PropertyTypeOptions',
+    'TextTypeOptions',
+    'TimestampTypeOptions',
+    'CreateDocumentSchemaRequest',
+    'DeleteDocumentSchemaRequest',
+    'GetDocumentSchemaRequest',
+    'ListDocumentSchemasRequest',
+    'ListDocumentSchemasResponse',
+    'UpdateDocumentSchemaRequest',
+    'CreateDocumentResponse',
+    'FetchAclResponse',
+    'QAResult',
+    'SearchDocumentsResponse',
+    'SetAclResponse',
+    'UpdateDocumentResponse',
+    'CloudAIDocumentOption',
+    'CreateDocumentRequest',
+    'DeleteDocumentRequest',
+    'FetchAclRequest',
+    'GetDocumentRequest',
+    'LockDocumentRequest',
+    'SearchDocumentsRequest',
+    'SetAclRequest',
+    'UpdateDocumentRequest',
+    'CustomWeightsMetadata',
+    'DocumentQuery',
+    'FileTypeFilter',
+    'PropertyFilter',
+    'TimeFilter',
+    'WeightedSchemaProperty',
+    'HistogramQuery',
+    'HistogramQueryPropertyNameFilter',
+    'HistogramQueryResult',
+    'RunPipelineRequest',
+    'ExportToCdwPipeline',
+    'GcsIngestPipeline',
+    'GcsIngestWithDocAiProcessorsPipeline',
+    'IngestPipelineConfig',
+    'ProcessorInfo',
+    'ProcessWithDocAiPipeline',
+    'RunPipelineMetadata',
+    'RunPipelineResponse',
+    'AccessControlAction',
+    'Action',
+    'ActionExecutorOutput',
+    'ActionOutput',
+    'AddToFolderAction',
+    'DataUpdateAction',
+    'DataValidationAction',
+    'DeleteDocumentAction',
+    'InvalidRule',
+    'PublishAction',
+    'RemoveFromFolderAction',
+    'Rule',
+    'RuleActionsPair',
+    'RuleEngineOutput',
+    'RuleEvaluatorOutput',
+    'RuleSet',
+    'CreateRuleSetRequest',
+    'DeleteRuleSetRequest',
+    'GetRuleSetRequest',
+    'ListRuleSetsRequest',
+    'ListRuleSetsResponse',
+    'UpdateRuleSetRequest',
+    'SynonymSet',
+    'CreateSynonymSetRequest',
+    'DeleteSynonymSetRequest',
+    'GetSynonymSetRequest',
+    'ListSynonymSetsRequest',
+    'ListSynonymSetsResponse',
+    'UpdateSynonymSetRequest',
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/async_document_service_request.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/async_document_service_request.py
new file mode 100644
index 000000000000..fadce7fcd308
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/async_document_service_request.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+
# proto-plus module registry: declares the package and the message names
# this module defines so they can be resolved by name.
__protobuf__ = proto.module(
    package='google.cloud.contentwarehouse.v1',
    manifest={
        'CreateDocumentMetadata',
        'UpdateDocumentMetadata',
    },
)
+
+
class CreateDocumentMetadata(proto.Message):
    r"""Metadata object for CreateDocument request (currently empty).
    """
    # NOTE(review): no fields are declared; the service may add fields to
    # this message in later API versions without breaking clients.
+
+
class UpdateDocumentMetadata(proto.Message):
    r"""Metadata object for UpdateDocument request (currently empty).
    """
    # NOTE(review): intentionally field-less, mirroring CreateDocumentMetadata.
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/common.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/common.py
new file mode 100644
index 000000000000..a4a2cef2b9df
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/common.py
@@ -0,0 +1,272 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import field_mask_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'UpdateType',
+        'DatabaseType',
+        'AccessControlMode',
+        'DocumentCreatorDefaultRole',
+        'RequestMetadata',
+        'ResponseMetadata',
+        'UserInfo',
+        'UpdateOptions',
+        'MergeFieldsOptions',
+    },
+)
+
+
+class UpdateType(proto.Enum):
+    r"""Update type of the requests.
+
+    Values:
+        UPDATE_TYPE_UNSPECIFIED (0):
+            Defaults to full replace behavior, i.e. FULL_REPLACE.
+        UPDATE_TYPE_REPLACE (1):
+            Fully replace all the fields (including
+            previously linked raw document). Any field masks
+            will be ignored.
+        UPDATE_TYPE_MERGE (2):
+            Merge the fields into the existing entities.
+        UPDATE_TYPE_INSERT_PROPERTIES_BY_NAMES (3):
+            Inserts the properties by names.
+        UPDATE_TYPE_REPLACE_PROPERTIES_BY_NAMES (4):
+            Replace the properties by names.
+        UPDATE_TYPE_DELETE_PROPERTIES_BY_NAMES (5):
+            Delete the properties by names.
+        UPDATE_TYPE_MERGE_AND_REPLACE_OR_INSERT_PROPERTIES_BY_NAMES (6):
+            For each property, replaces the property
+            if it exists, otherwise inserts a new
+            property. And for the rest of the fields,
+            merge them based on update mask and merge fields
+            options.
+    """
+    UPDATE_TYPE_UNSPECIFIED = 0
+    UPDATE_TYPE_REPLACE = 1
+    UPDATE_TYPE_MERGE = 2
+    UPDATE_TYPE_INSERT_PROPERTIES_BY_NAMES = 3
+    UPDATE_TYPE_REPLACE_PROPERTIES_BY_NAMES = 4
+    UPDATE_TYPE_DELETE_PROPERTIES_BY_NAMES = 5
+    UPDATE_TYPE_MERGE_AND_REPLACE_OR_INSERT_PROPERTIES_BY_NAMES = 6
+
+
+class DatabaseType(proto.Enum):
+    r"""Type of database used by the customer
+
+    Values:
+        DB_UNKNOWN (0):
+            This value is required by protobuf best
+            practices
+        DB_INFRA_SPANNER (1):
+            Internal Spanner
+        DB_CLOUD_SQL_POSTGRES (2):
+            Cloud SQL with a PostgreSQL instance
+    """
+    DB_UNKNOWN = 0
+    DB_INFRA_SPANNER = 1
+    DB_CLOUD_SQL_POSTGRES = 2
+
+
+class AccessControlMode(proto.Enum):
+    r"""Access Control Mode.
+
+    Values:
+        ACL_MODE_UNKNOWN (0):
+            This value is required by protobuf best
+            practices
+        ACL_MODE_UNIVERSAL_ACCESS (1):
+            Universal Access: No document level access
+            control.
+        ACL_MODE_DOCUMENT_LEVEL_ACCESS_CONTROL_BYOID (2):
+            Document level access control with customer
+            own Identity Service.
+        ACL_MODE_DOCUMENT_LEVEL_ACCESS_CONTROL_GCI (3):
+            Document level access control using Google
+            Cloud Identity.
+    """
+    ACL_MODE_UNKNOWN = 0
+    ACL_MODE_UNIVERSAL_ACCESS = 1
+    ACL_MODE_DOCUMENT_LEVEL_ACCESS_CONTROL_BYOID = 2
+    ACL_MODE_DOCUMENT_LEVEL_ACCESS_CONTROL_GCI = 3
+
+
+class DocumentCreatorDefaultRole(proto.Enum):
+    r"""The default role of the document creator.
+
+    Values:
+        DOCUMENT_CREATOR_DEFAULT_ROLE_UNSPECIFIED (0):
+            Unspecified, will be default to document
+            admin role.
+        DOCUMENT_ADMIN (1):
+            Document Admin, same as
+            contentwarehouse.googleapis.com/documentAdmin.
+        DOCUMENT_EDITOR (2):
+            Document Editor, same as
+            contentwarehouse.googleapis.com/documentEditor.
+        DOCUMENT_VIEWER (3):
+            Document Viewer, same as
+            contentwarehouse.googleapis.com/documentViewer.
+    """
+    DOCUMENT_CREATOR_DEFAULT_ROLE_UNSPECIFIED = 0
+    DOCUMENT_ADMIN = 1
+    DOCUMENT_EDITOR = 2
+    DOCUMENT_VIEWER = 3
+
+
+class RequestMetadata(proto.Message):
+    r"""Meta information is used to improve the performance of the
+    service.
+
+    Attributes:
+        user_info (google.cloud.contentwarehouse_v1.types.UserInfo):
+            Provides user unique identification and
+            groups information.
+    """
+
+    user_info: 'UserInfo' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='UserInfo',
+    )
+
+
+class ResponseMetadata(proto.Message):
+    r"""Additional information returned to client, such as debugging
+    information.
+
+    Attributes:
+        request_id (str):
+            A unique id associated with this call. This
+            id is logged for tracking purpose.
+    """
+
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UserInfo(proto.Message):
+    r"""The user information.
+
+    Attributes:
+        id (str):
+            A unique user identification string, as determined by the
+            client. The maximum number of allowed characters is 255.
+            Allowed characters include numbers 0 to 9, uppercase and
+            lowercase letters, and restricted special symbols (:, @, +,
+            -, \_, ~) The format is "user:xxxx@example.com";
+        group_ids (MutableSequence[str]):
+            The unique group identifications to which the
+            user belongs. The format is
+            "group:yyyy@example.com";
+    """
+
+    id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    group_ids: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+class UpdateOptions(proto.Message):
+    r"""Options for Update operations.
+
+    Attributes:
+        update_type (google.cloud.contentwarehouse_v1.types.UpdateType):
+            Type for update.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Field mask for merging Document fields. For the
+            ``FieldMask`` definition, see
+            https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
+        merge_fields_options (google.cloud.contentwarehouse_v1.types.MergeFieldsOptions):
+            Options for merging.
+    """
+
+    update_type: 'UpdateType' = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum='UpdateType',
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+    merge_fields_options: 'MergeFieldsOptions' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='MergeFieldsOptions',
+    )
+
+
+class MergeFieldsOptions(proto.Message):
+    r"""Options for merging updated fields.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        replace_message_fields (bool):
+            When merging message fields, the default
+            behavior is to merge the content of two message
+            fields together. If you instead want to use the
+            field from the source message to replace the
+            corresponding field in the destination message,
+            set this flag to true. When this flag is set,
+            specified submessage fields that are missing in
+            source will be cleared in destination.
+
+            This field is a member of `oneof`_ ``_replace_message_fields``.
+        replace_repeated_fields (bool):
+            When merging repeated fields, the default behavior is to
+            append entries from the source repeated field to the
+            destination repeated field. If you instead want to keep only
+            the entries from the source repeated field, set this flag to
+            true.
+
+            If you want to replace a repeated field within a message
+            field on the destination message, you must set both
+            replace_repeated_fields and replace_message_fields to true,
+            otherwise the repeated fields will be appended.
+
+            This field is a member of `oneof`_ ``_replace_repeated_fields``.
+    """
+
+    replace_message_fields: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+        optional=True,
+    )
+    replace_repeated_fields: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+        optional=True,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document.py
new file mode 100644
index 000000000000..ccae04f47c29
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document.py
@@ -0,0 +1,735 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.documentai_v1.types import document as gcd_document
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.type import datetime_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'RawDocumentFileType',
+        'ContentCategory',
+        'Document',
+        'DocumentReference',
+        'Property',
+        'IntegerArray',
+        'FloatArray',
+        'TextArray',
+        'EnumArray',
+        'DateTimeArray',
+        'TimestampArray',
+        'TimestampValue',
+        'PropertyArray',
+        'MapProperty',
+        'Value',
+        'EnumValue',
+    },
+)
+
+
+class RawDocumentFileType(proto.Enum):
+    r"""When a raw document is supplied, this indicates the file
+    format
+
+    Values:
+        RAW_DOCUMENT_FILE_TYPE_UNSPECIFIED (0):
+            No raw document specified or it is
+            non-parsable
+        RAW_DOCUMENT_FILE_TYPE_PDF (1):
+            Adobe PDF format
+        RAW_DOCUMENT_FILE_TYPE_DOCX (2):
+            Microsoft Word format
+        RAW_DOCUMENT_FILE_TYPE_XLSX (3):
+            Microsoft Excel format
+        RAW_DOCUMENT_FILE_TYPE_PPTX (4):
+            Microsoft Powerpoint format
+        RAW_DOCUMENT_FILE_TYPE_TEXT (5):
+            UTF-8 encoded text format
+        RAW_DOCUMENT_FILE_TYPE_TIFF (6):
+            TIFF or TIF image file format
+    """
+    RAW_DOCUMENT_FILE_TYPE_UNSPECIFIED = 0
+    RAW_DOCUMENT_FILE_TYPE_PDF = 1
+    RAW_DOCUMENT_FILE_TYPE_DOCX = 2
+    RAW_DOCUMENT_FILE_TYPE_XLSX = 3
+    RAW_DOCUMENT_FILE_TYPE_PPTX = 4
+    RAW_DOCUMENT_FILE_TYPE_TEXT = 5
+    RAW_DOCUMENT_FILE_TYPE_TIFF = 6
+
+
+class ContentCategory(proto.Enum):
+    r"""When a raw document or structured content is supplied, this
+    stores the content category.
+
+    Values:
+        CONTENT_CATEGORY_UNSPECIFIED (0):
+            No category is specified.
+        CONTENT_CATEGORY_IMAGE (1):
+            Content is of image type.
+        CONTENT_CATEGORY_AUDIO (2):
+            Content is of audio type.
+        CONTENT_CATEGORY_VIDEO (3):
+            Content is of video type.
+    """
+    CONTENT_CATEGORY_UNSPECIFIED = 0
+    CONTENT_CATEGORY_IMAGE = 1
+    CONTENT_CATEGORY_AUDIO = 2
+    CONTENT_CATEGORY_VIDEO = 3
+
+
+class Document(proto.Message):
+    r"""Defines the structure for content warehouse document proto.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            The resource name of the document. Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+
+            The name is ignored when creating a document.
+        reference_id (str):
+            The reference ID set by customers. Must be
+            unique per project and location.
+        display_name (str):
+            Required. Display name of the document given
+            by the user. This name will be displayed in the
+            UI. Customer can populate this field with the
+            name of the document. This differs from the
+            'title' field as 'title' is optional and stores
+            the top heading in the document.
+        title (str):
+            Title that describes the document.
+            This can be the top heading or text that
+            describes the document.
+        display_uri (str):
+            Uri to display the document, for example, in
+            the UI.
+        document_schema_name (str):
+            The Document schema name. Format:
+            projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+        plain_text (str):
+            Other document formats, such as PPTX, XLSX
+
+            This field is a member of `oneof`_ ``structured_content``.
+        cloud_ai_document (google.cloud.documentai_v1.types.Document):
+            Document AI format to save the structured
+            content, including OCR.
+
+            This field is a member of `oneof`_ ``structured_content``.
+        structured_content_uri (str):
+            A path linked to structured content file.
+        raw_document_path (str):
+            Raw document file in Cloud Storage path.
+
+            This field is a member of `oneof`_ ``raw_document``.
+        inline_raw_document (bytes):
+            Raw document content.
+
+            This field is a member of `oneof`_ ``raw_document``.
+        properties (MutableSequence[google.cloud.contentwarehouse_v1.types.Property]):
+            List of values that are user supplied
+            metadata.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the document is
+            last updated.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the document is
+            created.
+        raw_document_file_type (google.cloud.contentwarehouse_v1.types.RawDocumentFileType):
+            This is used when DocAI was not used to load the document
+            and parsing/extracting is needed for the
+            inline_raw_document. For example, if inline_raw_document is
+            the byte representation of a PDF file, then this should be
+            set to: RAW_DOCUMENT_FILE_TYPE_PDF.
+        async_enabled (bool):
+            If true, makes the document visible to
+            asynchronous policies and rules.
+        content_category (google.cloud.contentwarehouse_v1.types.ContentCategory):
+            Indicates the category (image, audio, video
+            etc.) of the original content.
+        text_extraction_disabled (bool):
+            If true, text extraction will not be
+            performed.
+        text_extraction_enabled (bool):
+            If true, text extraction will be performed.
+        creator (str):
+            The user who creates the document.
+        updater (str):
+            The user who lastly updates the document.
+        disposition_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. If linked to a Collection with
+            RetentionPolicy, the date when the document
+            becomes mutable.
+        legal_hold (bool):
+            Output only. Indicates if the document has a
+            legal hold on it.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    reference_id: str = proto.Field(
+        proto.STRING,
+        number=11,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    title: str = proto.Field(
+        proto.STRING,
+        number=18,
+    )
+    display_uri: str = proto.Field(
+        proto.STRING,
+        number=17,
+    )
+    document_schema_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    plain_text: str = proto.Field(
+        proto.STRING,
+        number=15,
+        oneof='structured_content',
+    )
+    cloud_ai_document: gcd_document.Document = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof='structured_content',
+        message=gcd_document.Document,
+    )
+    structured_content_uri: str = proto.Field(
+        proto.STRING,
+        number=16,
+    )
+    raw_document_path: str = proto.Field(
+        proto.STRING,
+        number=5,
+        oneof='raw_document',
+    )
+    inline_raw_document: bytes = proto.Field(
+        proto.BYTES,
+        number=6,
+        oneof='raw_document',
+    )
+    properties: MutableSequence['Property'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=7,
+        message='Property',
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=timestamp_pb2.Timestamp,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message=timestamp_pb2.Timestamp,
+    )
+    raw_document_file_type: 'RawDocumentFileType' = proto.Field(
+        proto.ENUM,
+        number=10,
+        enum='RawDocumentFileType',
+    )
+    async_enabled: bool = proto.Field(
+        proto.BOOL,
+        number=12,
+    )
+    content_category: 'ContentCategory' = proto.Field(
+        proto.ENUM,
+        number=20,
+        enum='ContentCategory',
+    )
+    text_extraction_disabled: bool = proto.Field(
+        proto.BOOL,
+        number=19,
+    )
+    text_extraction_enabled: bool = proto.Field(
+        proto.BOOL,
+        number=21,
+    )
+    creator: str = proto.Field(
+        proto.STRING,
+        number=13,
+    )
+    updater: str = proto.Field(
+        proto.STRING,
+        number=14,
+    )
+    disposition_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=22,
+        message=timestamp_pb2.Timestamp,
+    )
+    legal_hold: bool = proto.Field(
+        proto.BOOL,
+        number=23,
+    )
+
+
+class DocumentReference(proto.Message):
+    r"""References to the documents.
+
+    Attributes:
+        document_name (str):
+            Required. Name of the referenced document.
+        display_name (str):
+            display_name of the referenced document; this name does not
+            need to be consistent to the display_name in the Document
+            proto, depending on the ACL constraint.
+        snippet (str):
+            Stores the subset of the referenced
+            document's content. This is useful to allow the
+            user to peek at the referenced document's information.
+        document_is_folder (bool):
+            The document type of the document being
+            referenced.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the document is
+            last updated.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the document is
+            created.
+        delete_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the document is
+            deleted.
+        document_is_retention_folder (bool):
+            Document is a folder with retention policy.
+        document_is_legal_hold_folder (bool):
+            Document is a folder with legal hold.
+    """
+
+    document_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    snippet: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    document_is_folder: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=timestamp_pb2.Timestamp,
+    )
+    delete_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=timestamp_pb2.Timestamp,
+    )
+    document_is_retention_folder: bool = proto.Field(
+        proto.BOOL,
+        number=8,
+    )
+    document_is_legal_hold_folder: bool = proto.Field(
+        proto.BOOL,
+        number=9,
+    )
+
+
+class Property(proto.Message):
+    r"""Property of a document.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Required. Must match the name of a
+            PropertyDefinition in the DocumentSchema.
+        integer_values (google.cloud.contentwarehouse_v1.types.IntegerArray):
+            Integer property values.
+
+            This field is a member of `oneof`_ ``values``.
+        float_values (google.cloud.contentwarehouse_v1.types.FloatArray):
+            Float property values.
+
+            This field is a member of `oneof`_ ``values``.
+        text_values (google.cloud.contentwarehouse_v1.types.TextArray):
+            String/text property values.
+
+            This field is a member of `oneof`_ ``values``.
+        enum_values (google.cloud.contentwarehouse_v1.types.EnumArray):
+            Enum property values.
+
+            This field is a member of `oneof`_ ``values``.
+        property_values (google.cloud.contentwarehouse_v1.types.PropertyArray):
+            Nested structured data property values.
+
+            This field is a member of `oneof`_ ``values``.
+        date_time_values (google.cloud.contentwarehouse_v1.types.DateTimeArray):
+            Date time property values.
+            It is not supported by CMEK compliant
+            deployment.
+
+            This field is a member of `oneof`_ ``values``.
+        map_property (google.cloud.contentwarehouse_v1.types.MapProperty):
+            Map property values.
+
+            This field is a member of `oneof`_ ``values``.
+        timestamp_values (google.cloud.contentwarehouse_v1.types.TimestampArray):
+            Timestamp property values.
+            It is not supported by CMEK compliant
+            deployment.
+
+            This field is a member of `oneof`_ ``values``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    integer_values: 'IntegerArray' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='values',
+        message='IntegerArray',
+    )
+    float_values: 'FloatArray' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof='values',
+        message='FloatArray',
+    )
+    text_values: 'TextArray' = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof='values',
+        message='TextArray',
+    )
+    enum_values: 'EnumArray' = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof='values',
+        message='EnumArray',
+    )
+    property_values: 'PropertyArray' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        oneof='values',
+        message='PropertyArray',
+    )
+    date_time_values: 'DateTimeArray' = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        oneof='values',
+        message='DateTimeArray',
+    )
+    map_property: 'MapProperty' = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        oneof='values',
+        message='MapProperty',
+    )
+    timestamp_values: 'TimestampArray' = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        oneof='values',
+        message='TimestampArray',
+    )
+
+
+class IntegerArray(proto.Message):
+    r"""Integer values.
+
+    Attributes:
+        values (MutableSequence[int]):
+            List of integer values.
+    """
+
+    values: MutableSequence[int] = proto.RepeatedField(
+        proto.INT32,
+        number=1,
+    )
+
+
+class FloatArray(proto.Message):
+    r"""Float values.
+
+    Attributes:
+        values (MutableSequence[float]):
+            List of float values.
+    """
+
+    values: MutableSequence[float] = proto.RepeatedField(
+        proto.FLOAT,
+        number=1,
+    )
+
+
+class TextArray(proto.Message):
+    r"""String/text values.
+
+    Attributes:
+        values (MutableSequence[str]):
+            List of text values.
+    """
+
+    values: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+
+
+class EnumArray(proto.Message):
+    r"""Enum values.
+
+    Attributes:
+        values (MutableSequence[str]):
+            List of enum values.
+    """
+
+    values: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+
+
+class DateTimeArray(proto.Message):
+    r"""DateTime values.
+
+    Attributes:
+        values (MutableSequence[google.type.datetime_pb2.DateTime]):
+            List of datetime values.
+            Both OffsetDateTime and ZonedDateTime are
+            supported.
+    """
+
+    values: MutableSequence[datetime_pb2.DateTime] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=datetime_pb2.DateTime,
+    )
+
+
+class TimestampArray(proto.Message):
+    r"""Timestamp values.
+
+    Attributes:
+        values (MutableSequence[google.cloud.contentwarehouse_v1.types.TimestampValue]):
+            List of timestamp values.
+    """
+
+    values: MutableSequence['TimestampValue'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='TimestampValue',
+    )
+
+
+class TimestampValue(proto.Message):
+    r"""Timestamp value type.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        timestamp_value (google.protobuf.timestamp_pb2.Timestamp):
+            Timestamp value
+
+            This field is a member of `oneof`_ ``value``.
+        text_value (str):
+            The string must represent a valid instant in UTC and is
+            parsed using java.time.format.DateTimeFormatter.ISO_INSTANT.
+            e.g. "2013-09-29T18:46:19Z".
+
+            This field is a member of `oneof`_ ``value``.
+    """
+
+    timestamp_value: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='value',
+        message=timestamp_pb2.Timestamp,
+    )
+    text_value: str = proto.Field(
+        proto.STRING,
+        number=2,
+        oneof='value',
+    )
+
+
+class PropertyArray(proto.Message):
+    r"""Property values.
+
+    Attributes:
+        properties (MutableSequence[google.cloud.contentwarehouse_v1.types.Property]):
+            List of property values.
+    """
+
+    properties: MutableSequence['Property'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='Property',
+    )
+
+
+class MapProperty(proto.Message):
+    r"""Map property value.
+    Represents a structured entries of key value pairs, consisting
+    of field names which map to dynamically typed values.
+
+    Attributes:
+        fields (MutableMapping[str, google.cloud.contentwarehouse_v1.types.Value]):
+            Unordered map of dynamically typed values.
+    """
+
+    fields: MutableMapping[str, 'Value'] = proto.MapField(
+        proto.STRING,
+        proto.MESSAGE,
+        number=1,
+        message='Value',
+    )
+
+
+class Value(proto.Message):
+    r"""``Value`` represents a dynamically typed value which can be
+    a float, an integer, a string, or a datetime value. A producer of
+    value is expected to set one of these variants. Absence of any
+    variant indicates an error.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        float_value (float):
+            Represents a float value.
+
+            This field is a member of `oneof`_ ``kind``.
+        int_value (int):
+            Represents an integer value.
+
+            This field is a member of `oneof`_ ``kind``.
+        string_value (str):
+            Represents a string value.
+
+            This field is a member of `oneof`_ ``kind``.
+        enum_value (google.cloud.contentwarehouse_v1.types.EnumValue):
+            Represents an enum value.
+
+            This field is a member of `oneof`_ ``kind``.
+        datetime_value (google.type.datetime_pb2.DateTime):
+            Represents a datetime value.
+
+            This field is a member of `oneof`_ ``kind``.
+        timestamp_value (google.cloud.contentwarehouse_v1.types.TimestampValue):
+            Represents a timestamp value.
+
+            This field is a member of `oneof`_ ``kind``.
+        boolean_value (bool):
+            Represents a boolean value.
+
+            This field is a member of `oneof`_ ``kind``.
+    """
+
+    float_value: float = proto.Field(
+        proto.FLOAT,
+        number=1,
+        oneof='kind',
+    )
+    int_value: int = proto.Field(
+        proto.INT32,
+        number=2,
+        oneof='kind',
+    )
+    string_value: str = proto.Field(
+        proto.STRING,
+        number=3,
+        oneof='kind',
+    )
+    enum_value: 'EnumValue' = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof='kind',
+        message='EnumValue',
+    )
+    datetime_value: datetime_pb2.DateTime = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof='kind',
+        message=datetime_pb2.DateTime,
+    )
+    timestamp_value: 'TimestampValue' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        oneof='kind',
+        message='TimestampValue',
+    )
+    boolean_value: bool = proto.Field(
+        proto.BOOL,
+        number=7,
+        oneof='kind',
+    )
+
+
+class EnumValue(proto.Message):
+    r"""Represents the string value of the enum field.
+
+    Attributes:
+        value (str):
+            String value of the enum field. This must
+            match defined set of enums in document schema
+            using EnumTypeOptions.
+    """
+
+    value: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_link_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_link_service.py
new file mode 100644
index 000000000000..7e38d2a320be
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_link_service.py
@@ -0,0 +1,306 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import document
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'ListLinkedTargetsResponse',
+        'ListLinkedTargetsRequest',
+        'ListLinkedSourcesResponse',
+        'ListLinkedSourcesRequest',
+        'DocumentLink',
+        'CreateDocumentLinkRequest',
+        'DeleteDocumentLinkRequest',
+    },
+)
+
+
+class ListLinkedTargetsResponse(proto.Message):
+    r"""Response message for DocumentLinkService.ListLinkedTargets.
+
+    Attributes:
+        document_links (MutableSequence[google.cloud.contentwarehouse_v1.types.DocumentLink]):
+            Target document-links.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    document_links: MutableSequence['DocumentLink'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='DocumentLink',
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ListLinkedTargetsRequest(proto.Message):
+    r"""Request message for DocumentLinkService.ListLinkedTargets.
+
+    Attributes:
+        parent (str):
+            Required. The name of the document, for which all target
+            links are returned. Format:
+            projects/{project_number}/locations/{location}/documents/{target_document_id}.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the
+            document creator, used to enforce access control
+            for the service.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.RequestMetadata,
+    )
+
+
+class ListLinkedSourcesResponse(proto.Message):
+    r"""Response message for DocumentLinkService.ListLinkedSources.
+
+    Attributes:
+        document_links (MutableSequence[google.cloud.contentwarehouse_v1.types.DocumentLink]):
+            Source document-links.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    document_links: MutableSequence['DocumentLink'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='DocumentLink',
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ListLinkedSourcesRequest(proto.Message):
+    r"""Request message for DocumentLinkService.ListLinkedSources.
+
+    Attributes:
+        parent (str):
+            Required. The name of the document, for which all source
+            links are returned. Format:
+            projects/{project_number}/locations/{location}/documents/{source_document_id}.
+        page_size (int):
+            The maximum number of document-links to
+            return. The service may return fewer than this
+            value.
+
+            If unspecified, at most 50 document-links will
+            be returned. The maximum value is 1000; values
+            above 1000 will be coerced to 1000.
+        page_token (str):
+            A page token, received from a previous ``ListLinkedSources``
+            call. Provide this to retrieve the subsequent page.
+
+            When paginating, all other parameters provided to
+            ``ListLinkedSources`` must match the call that provided the
+            page token.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the
+            document creator, used to enforce access control
+            for the service.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.RequestMetadata,
+    )
+
+
+class DocumentLink(proto.Message):
+    r"""A document-link between source and target document.
+
+    Attributes:
+        name (str):
+            Name of this document-link. It is required that the parent
+            derived form the name to be consistent with the source
+            document reference. Otherwise an exception will be thrown.
+            Format:
+            projects/{project_number}/locations/{location}/documents/{source_document_id}/documentLinks/{document_link_id}.
+        source_document_reference (google.cloud.contentwarehouse_v1.types.DocumentReference):
+            Document references of the source document.
+        target_document_reference (google.cloud.contentwarehouse_v1.types.DocumentReference):
+            Document references of the target document.
+        description (str):
+            Description of this document-link.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the documentLink
+            is last updated.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the documentLink
+            is created.
+        state (google.cloud.contentwarehouse_v1.types.DocumentLink.State):
+            The state of the documentlink. If target node
+            has been deleted, the link is marked as invalid.
+            Removing a source node will result in removal of
+            all associated links.
+    """
+    class State(proto.Enum):
+        r"""The state of a document-link.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Unknown state of documentlink.
+            ACTIVE (1):
+                The documentlink has both source and target
+                documents detected.
+            SOFT_DELETED (2):
+                Target document is deleted, and the
+                documentlink is marked as soft-deleted.
+        """
+        STATE_UNSPECIFIED = 0
+        ACTIVE = 1
+        SOFT_DELETED = 2
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    source_document_reference: document.DocumentReference = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=document.DocumentReference,
+    )
+    target_document_reference: document.DocumentReference = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=document.DocumentReference,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=timestamp_pb2.Timestamp,
+    )
+    state: State = proto.Field(
+        proto.ENUM,
+        number=7,
+        enum=State,
+    )
+
+
+class CreateDocumentLinkRequest(proto.Message):
+    r"""Request message for DocumentLinkService.CreateDocumentLink.
+
+    Attributes:
+        parent (str):
+            Required. Parent of the document-link to be created. parent
+            of document-link should be a document. Format:
+            projects/{project_number}/locations/{location}/documents/{source_document_id}.
+        document_link (google.cloud.contentwarehouse_v1.types.DocumentLink):
+            Required. Document links associated with the source
+            documents (source_document_id).
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the
+            document creator, used to enforce access control
+            for the service.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    document_link: 'DocumentLink' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='DocumentLink',
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.RequestMetadata,
+    )
+
+
+class DeleteDocumentLinkRequest(proto.Message):
+    r"""Request message for DocumentLinkService.DeleteDocumentLink.
+
+    Attributes:
+        name (str):
+            Required. The name of the document-link to be deleted.
+            Format:
+            projects/{project_number}/locations/{location}/documents/{source_document_id}/documentLinks/{document_link_id}.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the
+            document creator, used to enforce access control
+            for the service.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.RequestMetadata,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_schema.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_schema.py
new file mode 100644
index 000000000000..952dc1955aa0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_schema.py
@@ -0,0 +1,388 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'DocumentSchema',
+        'PropertyDefinition',
+        'IntegerTypeOptions',
+        'FloatTypeOptions',
+        'TextTypeOptions',
+        'DateTimeTypeOptions',
+        'MapTypeOptions',
+        'TimestampTypeOptions',
+        'PropertyTypeOptions',
+        'EnumTypeOptions',
+    },
+)
+
+
+class DocumentSchema(proto.Message):
+    r"""A document schema used to define document structure.
+
+    Attributes:
+        name (str):
+            The resource name of the document schema. Format:
+            projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+
+            The name is ignored when creating a document schema.
+        display_name (str):
+            Required. Name of the schema given by the
+            user. Must be unique per project.
+        property_definitions (MutableSequence[google.cloud.contentwarehouse_v1.types.PropertyDefinition]):
+            Document details.
+        document_is_folder (bool):
+            Document Type: true means the document is a
+            folder; otherwise it is a typical document.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the document
+            schema is last updated.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time when the document
+            schema is created.
+        description (str):
+            Schema description.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    property_definitions: MutableSequence['PropertyDefinition'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message='PropertyDefinition',
+    )
+    document_is_folder: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=timestamp_pb2.Timestamp,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+
+
+class PropertyDefinition(proto.Message):
+    r"""Defines the metadata for a schema property.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Required. The name of the metadata property. Must be unique
+            within a document schema and is case insensitive. Names must
+            be non-blank, start with a letter, and can contain
+            alphanumeric characters and: /, :, -, \_, and .
+        display_name (str):
+            The display-name for the property, used for
+            front-end.
+        is_repeatable (bool):
+            Whether the property can have multiple
+            values.
+        is_filterable (bool):
+            Whether the property can be filtered. If this
+            is a sub-property, all the parent properties
+            must be marked filterable.
+        is_searchable (bool):
+            Indicates that the property should be
+            included in a global search.
+        is_metadata (bool):
+            Whether the property is user supplied
+            metadata. This out-of-the-box placeholder
+            setting can be used to tag derived properties.
+            Its value and interpretation logic should be
+            implemented by API user.
+        is_required (bool):
+            Whether the property is mandatory.
+            Default is 'false', i.e. populating property
+            value can be skipped. If 'true' then user must
+            populate the value for this property.
+        retrieval_importance (google.cloud.contentwarehouse_v1.types.PropertyDefinition.RetrievalImportance):
+            The retrieval importance of the property
+            during search.
+        integer_type_options (google.cloud.contentwarehouse_v1.types.IntegerTypeOptions):
+            Integer property.
+
+            This field is a member of `oneof`_ ``value_type_options``.
+        float_type_options (google.cloud.contentwarehouse_v1.types.FloatTypeOptions):
+            Float property.
+
+            This field is a member of `oneof`_ ``value_type_options``.
+        text_type_options (google.cloud.contentwarehouse_v1.types.TextTypeOptions):
+            Text/string property.
+
+            This field is a member of `oneof`_ ``value_type_options``.
+        property_type_options (google.cloud.contentwarehouse_v1.types.PropertyTypeOptions):
+            Nested structured data property.
+
+            This field is a member of `oneof`_ ``value_type_options``.
+        enum_type_options (google.cloud.contentwarehouse_v1.types.EnumTypeOptions):
+            Enum/categorical property.
+
+            This field is a member of `oneof`_ ``value_type_options``.
+        date_time_type_options (google.cloud.contentwarehouse_v1.types.DateTimeTypeOptions):
+            Date time property.
+            It is not supported by CMEK compliant
+            deployment.
+
+            This field is a member of `oneof`_ ``value_type_options``.
+        map_type_options (google.cloud.contentwarehouse_v1.types.MapTypeOptions):
+            Map property.
+
+            This field is a member of `oneof`_ ``value_type_options``.
+        timestamp_type_options (google.cloud.contentwarehouse_v1.types.TimestampTypeOptions):
+            Timestamp property.
+            It is not supported by CMEK compliant
+            deployment.
+
+            This field is a member of `oneof`_ ``value_type_options``.
+        schema_sources (MutableSequence[google.cloud.contentwarehouse_v1.types.PropertyDefinition.SchemaSource]):
+            The mapping information between this property
+            to another schema source.
+    """
+    class RetrievalImportance(proto.Enum):
+        r"""Stores the retrieval importance.
+
+        Values:
+            RETRIEVAL_IMPORTANCE_UNSPECIFIED (0):
+                No importance specified. Default medium
+                importance.
+            HIGHEST (1):
+                Highest importance.
+            HIGHER (2):
+                Higher importance.
+            HIGH (3):
+                High importance.
+            MEDIUM (4):
+                Medium importance.
+            LOW (5):
+                Low importance (negative).
+            LOWEST (6):
+                Lowest importance (negative).
+        """
+        RETRIEVAL_IMPORTANCE_UNSPECIFIED = 0
+        HIGHEST = 1
+        HIGHER = 2
+        HIGH = 3
+        MEDIUM = 4
+        LOW = 5
+        LOWEST = 6
+
+    class SchemaSource(proto.Message):
+        r"""The schema source information.
+
+        Attributes:
+            name (str):
+                The schema name in the source.
+            processor_type (str):
+                The Doc AI processor type name.
+        """
+
+        name: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        processor_type: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=12,
+    )
+    is_repeatable: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    is_filterable: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+    is_searchable: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    is_metadata: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+    is_required: bool = proto.Field(
+        proto.BOOL,
+        number=14,
+    )
+    retrieval_importance: RetrievalImportance = proto.Field(
+        proto.ENUM,
+        number=18,
+        enum=RetrievalImportance,
+    )
+    integer_type_options: 'IntegerTypeOptions' = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        oneof='value_type_options',
+        message='IntegerTypeOptions',
+    )
+    float_type_options: 'FloatTypeOptions' = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        oneof='value_type_options',
+        message='FloatTypeOptions',
+    )
+    text_type_options: 'TextTypeOptions' = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        oneof='value_type_options',
+        message='TextTypeOptions',
+    )
+    property_type_options: 'PropertyTypeOptions' = proto.Field(
+        proto.MESSAGE,
+        number=10,
+        oneof='value_type_options',
+        message='PropertyTypeOptions',
+    )
+    enum_type_options: 'EnumTypeOptions' = proto.Field(
+        proto.MESSAGE,
+        number=11,
+        oneof='value_type_options',
+        message='EnumTypeOptions',
+    )
+    date_time_type_options: 'DateTimeTypeOptions' = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        oneof='value_type_options',
+        message='DateTimeTypeOptions',
+    )
+    map_type_options: 'MapTypeOptions' = proto.Field(
+        proto.MESSAGE,
+        number=15,
+        oneof='value_type_options',
+        message='MapTypeOptions',
+    )
+    timestamp_type_options: 'TimestampTypeOptions' = proto.Field(
+        proto.MESSAGE,
+        number=16,
+        oneof='value_type_options',
+        message='TimestampTypeOptions',
+    )
+    schema_sources: MutableSequence[SchemaSource] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=19,
+        message=SchemaSource,
+    )
+
+
+class IntegerTypeOptions(proto.Message):
+    r"""Configurations for an integer property.
+    """
+
+
+class FloatTypeOptions(proto.Message):
+    r"""Configurations for a float property.
+    """
+
+
+class TextTypeOptions(proto.Message):
+    r"""Configurations for a text property.
+    """
+
+
+class DateTimeTypeOptions(proto.Message):
+    r"""Configurations for a date time property.
+    """
+
+
+class MapTypeOptions(proto.Message):
+    r"""Configurations for a Map property.
+    """
+
+
+class TimestampTypeOptions(proto.Message):
+    r"""Configurations for a timestamp property.
+    """
+
+
+class PropertyTypeOptions(proto.Message):
+    r"""Configurations for a nested structured data property.
+
+    Attributes:
+        property_definitions (MutableSequence[google.cloud.contentwarehouse_v1.types.PropertyDefinition]):
+            Required. List of property definitions.
+    """
+
+    property_definitions: MutableSequence['PropertyDefinition'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='PropertyDefinition',
+    )
+
+
+class EnumTypeOptions(proto.Message):
+    r"""Configurations for an enum/categorical property.
+
+    Attributes:
+        possible_values (MutableSequence[str]):
+            Required. List of possible enum values.
+        validation_check_disabled (bool):
+            Make sure the Enum property value provided in
+            the document is in the possible value list during
+            document creation. The validation check runs by
+            default.
+    """
+
+    possible_values: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    validation_check_disabled: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_schema_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_schema_service.py
new file mode 100644
index 000000000000..7b7ee73dee1f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_schema_service.py
@@ -0,0 +1,182 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'CreateDocumentSchemaRequest',
+        'GetDocumentSchemaRequest',
+        'UpdateDocumentSchemaRequest',
+        'DeleteDocumentSchemaRequest',
+        'ListDocumentSchemasRequest',
+        'ListDocumentSchemasResponse',
+    },
+)
+
+
+class CreateDocumentSchemaRequest(proto.Message):
+    r"""Request message for
+    DocumentSchemaService.CreateDocumentSchema.
+
+    Attributes:
+        parent (str):
+            Required. The parent name.
+        document_schema (google.cloud.contentwarehouse_v1.types.DocumentSchema):
+            Required. The document schema to create.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    document_schema: gcc_document_schema.DocumentSchema = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gcc_document_schema.DocumentSchema,
+    )
+
+
+class GetDocumentSchemaRequest(proto.Message):
+    r"""Request message for DocumentSchemaService.GetDocumentSchema.
+
+    Attributes:
+        name (str):
+            Required. The name of the document schema to
+            retrieve.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UpdateDocumentSchemaRequest(proto.Message):
+    r"""Request message for
+    DocumentSchemaService.UpdateDocumentSchema.
+
+    Attributes:
+        name (str):
+            Required. The name of the document schema to update. Format:
+            projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+        document_schema (google.cloud.contentwarehouse_v1.types.DocumentSchema):
+            Required. The document schema to update with.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    document_schema: gcc_document_schema.DocumentSchema = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gcc_document_schema.DocumentSchema,
+    )
+
+
+class DeleteDocumentSchemaRequest(proto.Message):
+    r"""Request message for
+    DocumentSchemaService.DeleteDocumentSchema.
+
+    Attributes:
+        name (str):
+            Required. The name of the document schema to
+            delete.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListDocumentSchemasRequest(proto.Message):
+    r"""Request message for
+    DocumentSchemaService.ListDocumentSchemas.
+
+    Attributes:
+        parent (str):
+            Required. The parent, which owns this collection of document
+            schemas. Format:
+            projects/{project_number}/locations/{location}.
+        page_size (int):
+            The maximum number of document schemas to
+            return. The service may return fewer than this
+            value. If unspecified, at most 50 document
+            schemas will be returned. The maximum value is
+            1000; values above 1000 will be coerced to 1000.
+        page_token (str):
+            A page token, received from a previous
+            ``ListDocumentSchemas`` call. Provide this to retrieve the
+            subsequent page.
+
+            When paginating, all other parameters provided to
+            ``ListDocumentSchemas`` must match the call that provided
+            the page token.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListDocumentSchemasResponse(proto.Message):
+    r"""Response message for
+    DocumentSchemaService.ListDocumentSchemas.
+
+    Attributes:
+        document_schemas (MutableSequence[google.cloud.contentwarehouse_v1.types.DocumentSchema]):
+            The document schemas from the specified
+            parent.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    document_schemas: MutableSequence[gcc_document_schema.DocumentSchema] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gcc_document_schema.DocumentSchema,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_service.py
new file mode 100644
index 000000000000..2018a3e3e790
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_service.py
@@ -0,0 +1,327 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import histogram
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'CreateDocumentResponse',
+        'UpdateDocumentResponse',
+        'QAResult',
+        'SearchDocumentsResponse',
+        'FetchAclResponse',
+        'SetAclResponse',
+    },
+)
+
+
+class CreateDocumentResponse(proto.Message):
+    r"""Response message for DocumentService.CreateDocument.
+
+    Attributes:
+        document (google.cloud.contentwarehouse_v1.types.Document):
+            Document created after executing create
+            request.
+        rule_engine_output (google.cloud.contentwarehouse_v1.types.RuleEngineOutput):
+            Output from Rule Engine recording the rule evaluator and
+            action executor's output.
+
+            Refer format in:
+            google/cloud/contentwarehouse/v1/rule_engine.proto
+        metadata (google.cloud.contentwarehouse_v1.types.ResponseMetadata):
+            Additional information for the API
+            invocation, such as the request tracking id.
+        long_running_operations (MutableSequence[google.longrunning.operations_pb2.Operation]):
+            post-processing LROs
+    """
+
+    document: gcc_document.Document = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gcc_document.Document,
+    )
+    rule_engine_output: rule_engine.RuleEngineOutput = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=rule_engine.RuleEngineOutput,
+    )
+    metadata: common.ResponseMetadata = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.ResponseMetadata,
+    )
+    long_running_operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message=operations_pb2.Operation,
+    )
+
+
+class UpdateDocumentResponse(proto.Message):
+    r"""Response message for DocumentService.UpdateDocument.
+
+    Attributes:
+        document (google.cloud.contentwarehouse_v1.types.Document):
+            Updated document after executing update
+            request.
+        rule_engine_output (google.cloud.contentwarehouse_v1.types.RuleEngineOutput):
+            Output from Rule Engine recording the rule evaluator and
+            action executor's output.
+
+            Refer format in:
+            google/cloud/contentwarehouse/v1/rule_engine.proto
+        metadata (google.cloud.contentwarehouse_v1.types.ResponseMetadata):
+            Additional information for the API
+            invocation, such as the request tracking id.
+    """
+
+    document: gcc_document.Document = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gcc_document.Document,
+    )
+    rule_engine_output: rule_engine.RuleEngineOutput = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=rule_engine.RuleEngineOutput,
+    )
+    metadata: common.ResponseMetadata = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.ResponseMetadata,
+    )
+
+
+class QAResult(proto.Message):
+    r"""Additional result info for the question-answering feature.
+
+    Attributes:
+        highlights (MutableSequence[google.cloud.contentwarehouse_v1.types.QAResult.Highlight]):
+            Highlighted sections in the snippet.
+        confidence_score (float):
+            The calibrated confidence score for this document, in the
+            range [0., 1.]. This represents the confidence level for
+            whether the returned document and snippet answers the user's
+            query.
+    """
+
+    class Highlight(proto.Message):
+        r"""A text span in the search text snippet that represents a
+        highlighted section (answer context, highly relevant sentence,
+        etc.).
+
+        Attributes:
+            start_index (int):
+                Start index of the highlight.
+            end_index (int):
+                End index of the highlight, exclusive.
+        """
+
+        start_index: int = proto.Field(
+            proto.INT32,
+            number=1,
+        )
+        end_index: int = proto.Field(
+            proto.INT32,
+            number=2,
+        )
+
+    highlights: MutableSequence[Highlight] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=Highlight,
+    )
+    confidence_score: float = proto.Field(
+        proto.FLOAT,
+        number=2,
+    )
+
+
+class SearchDocumentsResponse(proto.Message):
+    r"""Response message for DocumentService.SearchDocuments.
+
+    Attributes:
+        matching_documents (MutableSequence[google.cloud.contentwarehouse_v1.types.SearchDocumentsResponse.MatchingDocument]):
+            The document entities that match the specified
+            [SearchDocumentsRequest][google.cloud.contentwarehouse.v1.SearchDocumentsRequest].
+        next_page_token (str):
+            The token that specifies the starting
+            position of the next page of results. This field
+            is empty if there are no more results.
+        total_size (int):
+            The total number of matched documents which is available
+            only if the client set
+            [SearchDocumentsRequest.require_total_size][google.cloud.contentwarehouse.v1.SearchDocumentsRequest.require_total_size]
+            to ``true`` or set
+            [SearchDocumentsRequest.total_result_size][google.cloud.contentwarehouse.v1.SearchDocumentsRequest.total_result_size]
+            to ``ESTIMATED_SIZE`` or ``ACTUAL_SIZE``. Otherwise, the
+            value will be ``-1``. Typically a UI would handle this
+            condition by displaying "of many", for example: "Displaying
+            10 of many".
+        metadata (google.cloud.contentwarehouse_v1.types.ResponseMetadata):
+            Additional information for the API
+            invocation, such as the request tracking id.
+        histogram_query_results (MutableSequence[google.cloud.contentwarehouse_v1.types.HistogramQueryResult]):
+            The histogram results that match with the specified
+            [SearchDocumentsRequest.histogram_queries][google.cloud.contentwarehouse.v1.SearchDocumentsRequest.histogram_queries].
+        question_answer (str):
+            Experimental.
+            Question answer from the query against the
+            document.
+    """
+
+    class MatchingDocument(proto.Message):
+        r"""Document entry with metadata inside
+        [SearchDocumentsResponse][google.cloud.contentwarehouse.v1.SearchDocumentsResponse]
+
+        Attributes:
+            document (google.cloud.contentwarehouse_v1.types.Document):
+                Document that matches the specified
+                [SearchDocumentsRequest][google.cloud.contentwarehouse.v1.SearchDocumentsRequest].
+                This document only contains indexed metadata information.
+            search_text_snippet (str):
+                Contains snippets of text from the document full raw text
+                that most closely match a search query's keywords, if
+                available. All HTML tags in the original fields are stripped
+                when returned in this field, and matching query keywords are
+                enclosed in HTML bold tags.
+
+                If the question-answering feature is enabled, this field
+                will instead contain a snippet that answers the user's
+                natural-language query. No HTML bold tags will be present,
+                and highlights in the answer snippet can be found in
+                [QAResult.highlights][google.cloud.contentwarehouse.v1.QAResult.highlights].
+            qa_result (google.cloud.contentwarehouse_v1.types.QAResult):
+                Experimental.
+                Additional result info if the question-answering
+                feature is enabled.
+            matched_token_page_indices (MutableSequence[int]):
+                Return the 1-based page indices where those
+                pages have one or more matched tokens.
+        """
+
+        document: gcc_document.Document = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=gcc_document.Document,
+        )
+        search_text_snippet: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        qa_result: 'QAResult' = proto.Field(
+            proto.MESSAGE,
+            number=3,
+            message='QAResult',
+        )
+        matched_token_page_indices: MutableSequence[int] = proto.RepeatedField(
+            proto.INT64,
+            number=4,
+        )
+
+    @property
+    def raw_page(self):
+        return self
+
+    matching_documents: MutableSequence[MatchingDocument] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=MatchingDocument,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    total_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    metadata: common.ResponseMetadata = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=common.ResponseMetadata,
+    )
+    histogram_query_results: MutableSequence[histogram.HistogramQueryResult] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=6,
+        message=histogram.HistogramQueryResult,
+    )
+    question_answer: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+
+
+class FetchAclResponse(proto.Message):
+    r"""Response message for DocumentService.FetchAcl.
+
+    Attributes:
+        policy (google.iam.v1.policy_pb2.Policy):
+            The IAM policy.
+        metadata (google.cloud.contentwarehouse_v1.types.ResponseMetadata):
+            Additional information for the API
+            invocation, such as the request tracking id.
+    """
+
+    policy: policy_pb2.Policy = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=policy_pb2.Policy,
+    )
+    metadata: common.ResponseMetadata = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.ResponseMetadata,
+    )
+
+
+class SetAclResponse(proto.Message):
+    r"""Response message for DocumentService.SetAcl.
+
+    Attributes:
+        policy (google.iam.v1.policy_pb2.Policy):
+            The policy will be attached to a resource
+            (e.g. project, document).
+        metadata (google.cloud.contentwarehouse_v1.types.ResponseMetadata):
+            Additional information for the API
+            invocation, such as the request tracking id.
+    """
+
+    policy: policy_pb2.Policy = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=policy_pb2.Policy,
+    )
+    metadata: common.ResponseMetadata = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.ResponseMetadata,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_service_request.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_service_request.py
new file mode 100644
index 000000000000..c452e9cdce95
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/document_service_request.py
@@ -0,0 +1,544 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import filters
+from google.cloud.contentwarehouse_v1.types import histogram
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'CloudAIDocumentOption',
+        'CreateDocumentRequest',
+        'GetDocumentRequest',
+        'UpdateDocumentRequest',
+        'DeleteDocumentRequest',
+        'SearchDocumentsRequest',
+        'LockDocumentRequest',
+        'FetchAclRequest',
+        'SetAclRequest',
+    },
+)
+
+
+class CloudAIDocumentOption(proto.Message):
+    r"""Request Option for processing Cloud AI Document in CW
+    Document.
+
+    Attributes:
+        enable_entities_conversions (bool):
+            Whether to convert all the entities to
+            properties.
+        customized_entities_properties_conversions (MutableMapping[str, str]):
+            If set, only selected entities will be
+            converted to properties.
+    """
+
+    enable_entities_conversions: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+    customized_entities_properties_conversions: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=2,
+    )
+
+
+class CreateDocumentRequest(proto.Message):
+    r"""Request message for DocumentService.CreateDocument.
+
+    Attributes:
+        parent (str):
+            Required. The parent name. Format:
+            projects/{project_number}/locations/{location}.
+        document (google.cloud.contentwarehouse_v1.types.Document):
+            Required. The document to create.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the end
+            user, used to enforce access control for the
+            service.
+        policy (google.iam.v1.policy_pb2.Policy):
+            Default document policy during creation.
+            This refers to an Identity and Access (IAM)
+            policy, which specifies access controls for the
+            Document.
+            Conditions defined in the policy will be
+            ignored.
+        cloud_ai_document_option (google.cloud.contentwarehouse_v1.types.CloudAIDocumentOption):
+            Request Option for processing Cloud AI
+            Document in Document Warehouse. This field
+            offers limited support for mapping entities from
+            Cloud AI Document to Warehouse Document. Please
+            consult with product team before using this
+            field and other available options.
+        create_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Field mask for creating Document fields. If mask path is
+            empty, it means all fields are masked. For the ``FieldMask``
+            definition, see
+            https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    document: gcc_document.Document = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gcc_document.Document,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.RequestMetadata,
+    )
+    policy: policy_pb2.Policy = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=policy_pb2.Policy,
+    )
+    cloud_ai_document_option: 'CloudAIDocumentOption' = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message='CloudAIDocumentOption',
+    )
+    create_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class GetDocumentRequest(proto.Message):
+    r"""Request message for DocumentService.GetDocument.
+
+    Attributes:
+        name (str):
+            Required. The name of the document to retrieve. Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}
+            or
+            projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the end
+            user, used to enforce access control for the
+            service.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.RequestMetadata,
+    )
+
+
+class UpdateDocumentRequest(proto.Message):
+    r"""Request message for DocumentService.UpdateDocument.
+
+    Attributes:
+        name (str):
+            Required. The name of the document to update. Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}
+            or
+            projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+        document (google.cloud.contentwarehouse_v1.types.Document):
+            Required. The document to update.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the end
+            user, used to enforce access control for the
+            service.
+        cloud_ai_document_option (google.cloud.contentwarehouse_v1.types.CloudAIDocumentOption):
+            Request Option for processing Cloud AI
+            Document in Document Warehouse. This field
+            offers limited support for mapping entities from
+            Cloud AI Document to Warehouse Document. Please
+            consult with product team before using this
+            field and other available options.
+        update_options (google.cloud.contentwarehouse_v1.types.UpdateOptions):
+            Options for the update operation.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    document: gcc_document.Document = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gcc_document.Document,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.RequestMetadata,
+    )
+    cloud_ai_document_option: 'CloudAIDocumentOption' = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message='CloudAIDocumentOption',
+    )
+    update_options: common.UpdateOptions = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=common.UpdateOptions,
+    )
+
+
+class DeleteDocumentRequest(proto.Message):
+    r"""Request message for DocumentService.DeleteDocument.
+
+    Attributes:
+        name (str):
+            Required. The name of the document to delete. Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}
+            or
+            projects/{project_number}/locations/{location}/documents/referenceId/{reference_id}.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the end
+            user, used to enforce access control for the
+            service.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.RequestMetadata,
+    )
+
+
+class SearchDocumentsRequest(proto.Message):
+    r"""Request message for DocumentService.SearchDocuments.
+
+    Attributes:
+        parent (str):
+            Required. The parent, which owns this collection of
+            documents. Format:
+            projects/{project_number}/locations/{location}.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the end
+            user, used to enforce access control and improve
+            the search quality of the service.
+        document_query (google.cloud.contentwarehouse_v1.types.DocumentQuery):
+            Query used to search against documents
+            (keyword, filters, etc.).
+        offset (int):
+            An integer that specifies the current offset (that is,
+            starting result location, amongst the documents deemed by
+            the API as relevant) in search results. This field is only
+            considered if
+            [page_token][google.cloud.contentwarehouse.v1.SearchDocumentsRequest.page_token]
+            is unset.
+
+            The maximum allowed value is 5000. Otherwise an error is
+            thrown.
+
+            For example, 0 means to return results starting from the
+            first matching document, and 10 means to return from the
+            11th document. This can be used for pagination, (for
+            example, pageSize = 10 and offset = 10 means to return from
+            the second page).
+        page_size (int):
+            A limit on the number of documents returned
+            in the search results. Increasing this value
+            above the default value of 10 can increase
+            search response time. The value can be between 1
+            and 100.
+        page_token (str):
+            The token specifying the current offset within search
+            results. See
+            [SearchDocumentsResponse.next_page_token][google.cloud.contentwarehouse.v1.SearchDocumentsResponse.next_page_token]
+            for an explanation of how to obtain the next set of query
+            results.
+        order_by (str):
+            The criteria determining how search results are sorted. For
+            non-empty query, default is ``"relevance desc"``. For empty
+            query, default is ``"upload_date desc"``.
+
+            Supported options are:
+
+            -  ``"relevance desc"``: By relevance descending, as
+               determined by the API algorithms.
+            -  ``"upload_date desc"``: By upload date descending.
+            -  ``"upload_date"``: By upload date ascending.
+            -  ``"update_date desc"``: By last updated date descending.
+            -  ``"update_date"``: By last updated date ascending.
+            -  ``"retrieval_importance desc"``: By retrieval importance
+               of properties descending. This feature is still under
+               development, please do not use unless otherwise
+               instructed to do so.
+        histogram_queries (MutableSequence[google.cloud.contentwarehouse_v1.types.HistogramQuery]):
+            An expression specifying a histogram request against
+            matching documents. Expression syntax is an aggregation
+            function call with histogram facets and other options.
+
+            The following aggregation functions are supported:
+
+            -  ``count(string_histogram_facet)``: Count the number of
+               matching entities for each distinct attribute value.
+
+            Data types:
+
+            -  Histogram facet (aka filterable properties): Facet names
+               with format <schema id>.<facet>. Facets will have the
+               format of: ``[a-zA-Z][a-zA-Z0-9_:/-.]``. If the facet is
+               a child facet, then the parent hierarchy needs to be
+               specified separated by dots in the prefix after the
+               schema id. Thus, the format for a multi- level facet is:
+               <schema id>.<parent facet name>. <child facet name>.
+               Example:
+               schema123.root_parent_facet.middle_facet.child_facet
+            -  DocumentSchemaId: (with no schema id prefix) to get
+               histograms for each document type (returns the schema id
+               path, e.g.
+               projects/12345/locations/us-west/documentSchemas/abc123).
+
+            Example expression:
+
+            -  Document type counts: count('DocumentSchemaId')
+
+            -  For schema id, abc123, get the counts for MORTGAGE_TYPE:
+               count('abc123.MORTGAGE_TYPE')
+        require_total_size (bool):
+            Controls if the search document request requires the return
+            of a total size of matched documents. See
+            [SearchDocumentsResponse.total_size][google.cloud.contentwarehouse.v1.SearchDocumentsResponse.total_size].
+
+            Enabling this flag may adversely impact performance. Hint:
+            If this is used with pagination, set this flag on the
+            initial query but set this to false on subsequent page calls
+            (keep the total count locally).
+
+            Defaults to false.
+        total_result_size (google.cloud.contentwarehouse_v1.types.SearchDocumentsRequest.TotalResultSize):
+            Controls if the search document request requires the return
+            of a total size of matched documents. See
+            [SearchDocumentsResponse.total_size][google.cloud.contentwarehouse.v1.SearchDocumentsResponse.total_size].
+        qa_size_limit (int):
+            Experimental, do not use. The limit on the number of
+            documents returned for the question-answering feature. To
+            enable the question-answering feature, set
+            [DocumentQuery].[is_nl_query][] to true.
+    """
+    class TotalResultSize(proto.Enum):
+        r"""The total number of matching documents.
+
+        Values:
+            TOTAL_RESULT_SIZE_UNSPECIFIED (0):
+                Total number calculation will be skipped.
+            ESTIMATED_SIZE (1):
+                Estimate total number. The total result size
+                will be accurate up to 10,000. This option will
+                add cost and latency to your request.
+            ACTUAL_SIZE (2):
+                It may adversely impact performance. The
+                limit is 1,000,000.
+        """
+        TOTAL_RESULT_SIZE_UNSPECIFIED = 0
+        ESTIMATED_SIZE = 1
+        ACTUAL_SIZE = 2
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.RequestMetadata,
+    )
+    document_query: filters.DocumentQuery = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=filters.DocumentQuery,
+    )
+    offset: int = proto.Field(
+        proto.INT32,
+        number=5,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=6,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    histogram_queries: MutableSequence[histogram.HistogramQuery] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=9,
+        message=histogram.HistogramQuery,
+    )
+    require_total_size: bool = proto.Field(
+        proto.BOOL,
+        number=10,
+    )
+    total_result_size: TotalResultSize = proto.Field(
+        proto.ENUM,
+        number=12,
+        enum=TotalResultSize,
+    )
+    qa_size_limit: int = proto.Field(
+        proto.INT32,
+        number=11,
+    )
+
+
+class LockDocumentRequest(proto.Message):
+    r"""Request message for DocumentService.LockDocument.
+
+    Attributes:
+        name (str):
+            Required. The name of the document to lock. Format:
+            projects/{project_number}/locations/{location}/documents/{document}.
+        collection_id (str):
+            The collection the document connects to.
+        locking_user (google.cloud.contentwarehouse_v1.types.UserInfo):
+            The user information who locks the document.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    collection_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    locking_user: common.UserInfo = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.UserInfo,
+    )
+
+
+class FetchAclRequest(proto.Message):
+    r"""Request message for DocumentService.FetchAcl
+
+    Attributes:
+        resource (str):
+            Required. REQUIRED: The resource for which the policy is
+            being requested. Format for document:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+            Format for collection:
+            projects/{project_number}/locations/{location}/collections/{collection_id}.
+            Format for project: projects/{project_number}.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the end
+            user, used to enforce access control for the
+            service.
+        project_owner (bool):
+            For Get Project ACL only. Authorization check for end user
+            will be ignored when project_owner=true.
+    """
+
+    resource: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.RequestMetadata,
+    )
+    project_owner: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+
+
+class SetAclRequest(proto.Message):
+    r"""Request message for DocumentService.SetAcl.
+
+    Attributes:
+        resource (str):
+            Required. REQUIRED: The resource for which the policy is
+            being requested. Format for document:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+            Format for collection:
+            projects/{project_number}/locations/{location}/collections/{collection_id}.
+            Format for project: projects/{project_number}.
+        policy (google.iam.v1.policy_pb2.Policy):
+            Required. REQUIRED: The complete policy to be applied to the
+            ``resource``. The size of the policy is limited to a few 10s
+            of KB. This refers to an Identity and Access (IAM) policy,
+            which specifies access controls for the Document.
+
+            You can set ACL with condition for projects only.
+
+            Supported operators are: ``=``, ``!=``, ``<``, ``<=``,
+            ``>``, and ``>=`` where the left of the operator is
+            ``DocumentSchemaId`` or property name and the right of the
+            operator is a number or a quoted string. You must escape
+            backslash (\) and quote (") characters.
+
+            Boolean expressions (AND/OR) are supported up to 3 levels of
+            nesting (for example, "((A AND B AND C) OR D) AND E"), a
+            maximum of 10 comparisons are allowed in the expression. The
+            expression must be < 6000 bytes in length.
+
+            Sample condition:
+            ``"DocumentSchemaId = \"some schema id\" OR SchemaId.floatPropertyName >= 10"``
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the end
+            user, used to enforce access control for the
+            service.
+        project_owner (bool):
+            For Set Project ACL only. Authorization check for end user
+            will be ignored when project_owner=true.
+    """
+
+    resource: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    policy: policy_pb2.Policy = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=policy_pb2.Policy,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.RequestMetadata,
+    )
+    project_owner: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/filters.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/filters.py
new file mode 100644
index 000000000000..5d16cde675db
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/filters.py
@@ -0,0 +1,423 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.type import interval_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(  # Registers this module's messages with the proto-plus runtime; do not edit by hand.
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'DocumentQuery',
+        'TimeFilter',
+        'PropertyFilter',
+        'FileTypeFilter',
+        'CustomWeightsMetadata',
+        'WeightedSchemaProperty',
+    },
+)
+
+
+class DocumentQuery(proto.Message):
+    r"""Defines the query and the filters applied when searching documents.
+
+    Attributes:
+        query (str):
+            The query string that matches against the full text of the
+            document and the searchable properties.
+
+            The query partially supports `Google AIP style
+            syntax <https://google.aip.dev/160>`__. Specifically, the
+            query supports literals, logical operators, negation
+            operators, comparison operators, and functions.
+
+            Literals: A bare literal value (examples: "42", "Hugo") is a
+            value to be matched against. It searches over the full text
+            of the document and the searchable properties.
+
+            Logical operators: "AND", "and", "OR", and "or" are binary
+            logical operators (example: "engineer OR developer").
+
+            Negation operators: "NOT" and "!" are negation operators
+            (example: "NOT software").
+
+            Comparison operators: support the binary comparison
+            operators =, !=, <, >, <= and >= for string, numeric, enum,
+            boolean. Also support like operator ``~~`` for string. It
+            provides semantic search functionality by parsing, stemming
+            and doing synonyms expansion against the input query.
+
+            To specify a property in the query, the left hand side
+            expression in the comparison must be the property ID
+            including the parent. The right hand side must be literals.
+            For example: ""projects/123/locations/us".property_a < 1"
+            matches results whose "property_a" is less than 1 in project
+            123 and us location. The literals and comparison expression
+            can be connected in a single query (example: "software
+            engineer "projects/123/locations/us".salary > 100").
+
+            Functions: supported functions are
+            ``LOWER([property_name])`` to perform a case insensitive
+            match and ``EMPTY([property_name])`` to filter on the
+            existence of a key.
+
+            Support nested expressions connected using parentheses and
+            logical operators. The default logical operator is ``AND``
+            if there are no operators between expressions.
+
+            The query can be used with other filters e.g.
+            ``time_filters`` and ``folder_name_filter``. They are
+            connected with ``AND`` operator under the hood.
+
+            The maximum number of allowed characters is 255.
+        is_nl_query (bool):
+            Experimental, do not use. If the query is a natural language
+            question. False by default. If true, then the
+            question-answering feature will be used instead of search,
+            and ``result_count`` in
+            [SearchDocumentsRequest][google.cloud.contentwarehouse.v1.SearchDocumentsRequest]
+            must be set. In addition, all other input fields related to
+            search (pagination, histograms, etc.) will be ignored.
+        custom_property_filter (str):
+            This filter specifies a structured syntax to match against
+            the [PropertyDefinition].[is_filterable][] marked as
+            ``true``. The syntax for this expression is a subset of SQL
+            syntax.
+
+            Supported operators are: ``=``, ``!=``, ``<``, ``<=``,
+            ``>``, and ``>=`` where the left of the operator is a
+            property name and the right of the operator is a number or a
+            quoted string. You must escape backslash (\) and quote (")
+            characters. Supported functions are
+            ``LOWER([property_name])`` to perform a case insensitive
+            match and ``EMPTY([property_name])`` to filter on the
+            existence of a key.
+
+            Boolean expressions (AND/OR/NOT) are supported up to 3
+            levels of nesting (for example, "((A AND B AND C) OR NOT D)
+            AND E"), a maximum of 100 comparisons or functions are
+            allowed in the expression. The expression must be < 6000
+            bytes in length.
+
+            Sample Query:
+            ``(LOWER(driving_license)="class \"a\"" OR EMPTY(driving_license)) AND driving_years > 10``
+        time_filters (MutableSequence[google.cloud.contentwarehouse_v1.types.TimeFilter]):
+            Documents created/updated within a range
+            specified by this filter are searched against.
+        document_schema_names (MutableSequence[str]):
+            This filter specifies the exact document schema
+            [Document.document_schema_name][google.cloud.contentwarehouse.v1.Document.document_schema_name]
+            of the documents to search against.
+
+            If a value isn't specified, documents within the search
+            results are associated with any schema. If multiple values
+            are specified, documents within the search results may be
+            associated with any of the specified schemas.
+
+            At most 20 document schema names are allowed.
+        property_filter (MutableSequence[google.cloud.contentwarehouse_v1.types.PropertyFilter]):
+            This filter specifies a structured syntax to match against
+            the
+            [PropertyDefinition.is_filterable][google.cloud.contentwarehouse.v1.PropertyDefinition.is_filterable]
+            marked as ``true``. The relationship between the
+            PropertyFilters is OR.
+        file_type_filter (google.cloud.contentwarehouse_v1.types.FileTypeFilter):
+            This filter specifies the types of files to
+            return: ALL, FOLDER, or FILE. If FOLDER or FILE
+            is specified, then only either folders or files
+            will be returned, respectively. If ALL is
+            specified, both folders and files will be
+            returned.
+
+            If no value is specified, ALL files will be
+            returned.
+        folder_name_filter (str):
+            Search all the documents under this specified folder.
+            Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+        document_name_filter (MutableSequence[str]):
+            Search the documents in the list. Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+        query_context (MutableSequence[str]):
+            For custom synonyms.
+            Customers provide the synonyms based on context.
+            One customer can provide multiple set of
+            synonyms based on different context. The search
+            query will be expanded based on the custom
+            synonyms of the query context set. By default,
+            no custom synonyms will be applied if no query
+            context is provided.
+            It is not supported for CMEK compliant
+            deployment.
+        document_creator_filter (MutableSequence[str]):
+            The exact creator(s) of the documents to
+            search against.
+            If a value isn't specified, documents within the
+            search results are associated with any creator.
+            If multiple values are specified, documents
+            within the search results may be associated with
+            any of the specified creators.
+        custom_weights_metadata (google.cloud.contentwarehouse_v1.types.CustomWeightsMetadata):
+            To support the custom weighting across
+            document schemas, customers need to provide the
+            properties to be used to boost the ranking in
+            the search request. For a search query with
+            CustomWeightsMetadata specified, only the
+            RetrievalImportance for the properties in the
+            CustomWeightsMetadata will be honored.
+    """
+
+    query: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    is_nl_query: bool = proto.Field(
+        proto.BOOL,
+        number=12,
+    )
+    custom_property_filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    time_filters: MutableSequence['TimeFilter'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=5,
+        message='TimeFilter',
+    )
+    document_schema_names: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=6,
+    )
+    property_filter: MutableSequence['PropertyFilter'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=7,
+        message='PropertyFilter',
+    )
+    file_type_filter: 'FileTypeFilter' = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message='FileTypeFilter',
+    )
+    folder_name_filter: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+    document_name_filter: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=14,
+    )
+    query_context: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=10,
+    )
+    document_creator_filter: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=11,
+    )
+    custom_weights_metadata: 'CustomWeightsMetadata' = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message='CustomWeightsMetadata',
+    )
+
+
+class TimeFilter(proto.Message):
+    r"""Filter on create timestamp or update timestamp of documents.
+
+    Attributes:
+        time_range (google.type.interval_pb2.Interval):
+            The time interval that the selected time field must fall in.
+        time_field (google.cloud.contentwarehouse_v1.types.TimeFilter.TimeField):
+            Specifies which time field to filter documents on.
+
+            Defaults to [TimeField.UPLOAD_TIME][].
+    """
+    class TimeField(proto.Enum):
+        r"""Time field used in TimeFilter.
+
+        Values:
+            TIME_FIELD_UNSPECIFIED (0):
+                Default value.
+            CREATE_TIME (1):
+                Earliest document create time.
+            UPDATE_TIME (2):
+                Latest document update time.
+            DISPOSITION_TIME (3):
+                Time when document becomes mutable again.
+        """
+        TIME_FIELD_UNSPECIFIED = 0
+        CREATE_TIME = 1
+        UPDATE_TIME = 2
+        DISPOSITION_TIME = 3
+
+    time_range: interval_pb2.Interval = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=interval_pb2.Interval,
+    )
+    time_field: TimeField = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=TimeField,
+    )
+
+
+class PropertyFilter(proto.Message):
+    r"""Filters documents by a condition on properties of a document schema.
+
+    Attributes:
+        document_schema_name (str):
+            The Document schema name
+            [Document.document_schema_name][google.cloud.contentwarehouse.v1.Document.document_schema_name].
+            Format:
+            projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+        condition (str):
+            The filter condition. The syntax for this expression is a
+            subset of SQL syntax.
+
+            Supported operators are: ``=``, ``!=``, ``<``, ``<=``,
+            ``>``, ``>=``, and ``~~`` where the left of the operator is
+            a property name and the right of the operator is a number or
+            a quoted string. You must escape backslash (\) and quote (")
+            characters.
+
+            ``~~`` is the LIKE operator. The right of the operator must
+            be a string. The only supported property data type for LIKE
+            is text_values. It provides semantic search functionality by
+            parsing, stemming and doing synonyms expansion against the
+            input query. It matches if the property contains semantic
+            similar content to the query. It is not regex matching or
+            wildcard matching. For example, "property.company ~~
+            "google"" will match records whose property
+            ``property.company`` have values like "Google Inc.", "Google
+            LLC" or "Google Company".
+
+            Supported functions are ``LOWER([property_name])`` to
+            perform a case insensitive match and
+            ``EMPTY([property_name])`` to filter on the existence of a
+            key.
+
+            Boolean expressions (AND/OR/NOT) are supported up to 3
+            levels of nesting (for example, "((A AND B AND C) OR NOT D)
+            AND E"), a maximum of 100 comparisons or functions are
+            allowed in the expression. The expression must be < 6000
+            bytes in length.
+
+            Only properties that are marked filterable are allowed
+            ([PropertyDefinition.is_filterable][google.cloud.contentwarehouse.v1.PropertyDefinition.is_filterable]).
+            Property names do not need to be prefixed by the document
+            schema id (as is the case with histograms), however property
+            names will need to be prefixed by its parent hierarchy, if
+            any. For example: top_property_name.sub_property_name.
+
+            Sample Query:
+            ``(LOWER(driving_license)="class \"a\"" OR EMPTY(driving_license)) AND driving_years > 10``
+
+            CMEK compliant deployment only supports:
+
+            -  Operators: ``=``, ``<``, ``<=``, ``>``, and ``>=``.
+            -  Boolean expressions: AND and OR.
+    """
+
+    document_schema_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    condition: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class FileTypeFilter(proto.Message):
+    r"""Filter for the specific types of documents returned.
+
+    Attributes:
+        file_type (google.cloud.contentwarehouse_v1.types.FileTypeFilter.FileType):
+            The type of files to return.
+    """
+    class FileType(proto.Enum):
+        r"""Representation of the types of files.
+
+        Values:
+            FILE_TYPE_UNSPECIFIED (0):
+                Default document type. If set, disables the
+                filter.
+            ALL (1):
+                Returns all document types, including
+                folders.
+            FOLDER (2):
+                Returns only folders.
+            DOCUMENT (3):
+                Returns only non-folder documents.
+            ROOT_FOLDER (4):
+                Returns only root folders.
+        """
+        FILE_TYPE_UNSPECIFIED = 0
+        ALL = 1
+        FOLDER = 2
+        DOCUMENT = 3
+        ROOT_FOLDER = 4
+
+    file_type: FileType = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=FileType,
+    )
+
+
+class CustomWeightsMetadata(proto.Message):
+    r"""Metadata supporting custom ranking weights across document schemas.
+
+    Attributes:
+        weighted_schema_properties (MutableSequence[google.cloud.contentwarehouse_v1.types.WeightedSchemaProperty]):
+            List of schema and property name. Allows a
+            maximum of 10 schemas to be specified for
+            relevance boosting.
+    """
+
+    weighted_schema_properties: MutableSequence['WeightedSchemaProperty'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='WeightedSchemaProperty',
+    )
+
+
+class WeightedSchemaProperty(proto.Message):
+    r"""Specifies a document schema and its property names for weighting.
+
+    Attributes:
+        document_schema_name (str):
+            The document schema name.
+        property_names (MutableSequence[str]):
+            The property definition names in the schema.
+    """
+
+    document_schema_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    property_names: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))  # Public API: exactly the messages registered in the proto manifest.
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/histogram.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/histogram.py
new file mode 100644
index 000000000000..cb27121efd7c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/histogram.py
@@ -0,0 +1,159 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(  # Registers this module's messages with the proto-plus runtime; do not edit by hand.
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'HistogramQuery',
+        'HistogramQueryPropertyNameFilter',
+        'HistogramQueryResult',
+    },
+)
+
+
+class HistogramQuery(proto.Message):
+    r"""The histogram request.
+
+    Attributes:
+        histogram_query (str):
+            An expression specifies a histogram request against matching
+            documents for searches.
+
+            See
+            [SearchDocumentsRequest.histogram_queries][google.cloud.contentwarehouse.v1.SearchDocumentsRequest.histogram_queries]
+            for details about syntax.
+        require_precise_result_size (bool):
+            Controls if the histogram query requires the
+            return of a precise count. Enabling this flag may
+            adversely impact performance.
+
+            Defaults to true.
+        filters (google.cloud.contentwarehouse_v1.types.HistogramQueryPropertyNameFilter):
+            Optional. Filter the result of histogram
+            query by the property names. It only works with
+            histogram query count('FilterableProperties').
+            It is optional. It will perform histogram on
+            all the property names for all the document
+            schemas. Setting this field will have a better
+            performance.
+    """
+
+    histogram_query: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    require_precise_result_size: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    filters: 'HistogramQueryPropertyNameFilter' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='HistogramQueryPropertyNameFilter',
+    )
+
+
+class HistogramQueryPropertyNameFilter(proto.Message):
+    r"""Restricts a histogram query to given document schemas and properties.
+
+    Attributes:
+        document_schemas (MutableSequence[str]):
+            This filter specifies the exact document schema(s)
+            [Document.document_schema_name][google.cloud.contentwarehouse.v1.Document.document_schema_name]
+            to run histogram query against. It is optional. It will
+            perform histogram for property names for all the document
+            schemas if it is not set.
+
+            At most 10 document schema names are allowed. Format:
+            projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+        property_names (MutableSequence[str]):
+            It is optional. It will perform histogram for all the
+            property names if it is not set. The properties need to be
+            defined with the is_filterable flag set to true and the name
+            of the property should be in the format:
+            "schemaId.propertyName". The property needs to be defined in
+            the schema. Example: the schema id is abc. Then the name of
+            property for property MORTGAGE_TYPE will be
+            "abc.MORTGAGE_TYPE".
+        y_axis (google.cloud.contentwarehouse_v1.types.HistogramQueryPropertyNameFilter.HistogramYAxis):
+            By default, the y_axis is HISTOGRAM_YAXIS_DOCUMENT if this
+            field is not set.
+    """
+    class HistogramYAxis(proto.Enum):
+        r"""The result of the histogram query count('FilterableProperties')
+        using HISTOGRAM_YAXIS_DOCUMENT will be: invoice_id: 2 address: 1
+        payment_method: 2 line_item_description: 1
+
+        Values:
+            HISTOGRAM_YAXIS_DOCUMENT (0):
+                Count the documents per property name.
+            HISTOGRAM_YAXIS_PROPERTY (1):
+                Count the properties per property name.
+        """
+        HISTOGRAM_YAXIS_DOCUMENT = 0
+        HISTOGRAM_YAXIS_PROPERTY = 1
+
+    document_schemas: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    property_names: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+    y_axis: HistogramYAxis = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=HistogramYAxis,
+    )
+
+
+class HistogramQueryResult(proto.Message):
+    r"""Histogram result that matches
+    [HistogramQuery][google.cloud.contentwarehouse.v1.HistogramQuery]
+    specified in searches.
+
+    Attributes:
+        histogram_query (str):
+            Requested histogram expression.
+        histogram (MutableMapping[str, int]):
+            A map from each distinct value of the facet to
+            the number of matching entries with that
+            value.
+
+            The key format is:
+
+            -  (for string histogram) string values stored in the field.
+    """
+
+    histogram_query: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    histogram: MutableMapping[str, int] = proto.MapField(
+        proto.STRING,
+        proto.INT64,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))  # Public API: exactly the messages registered in the proto manifest.
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/pipeline_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/pipeline_service.py
new file mode 100644
index 000000000000..eb0dd4b9133a
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/pipeline_service.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import pipelines
+
+
+__protobuf__ = proto.module(  # Registers this module's messages with the proto-plus runtime; do not edit by hand.
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'RunPipelineRequest',
+    },
+)
+
+
+class RunPipelineRequest(proto.Message):
+    r"""Request message for DocumentService.RunPipeline.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Required. The resource name which owns the resources of the
+            pipeline. Format:
+            projects/{project_number}/locations/{location}.
+        gcs_ingest_pipeline (google.cloud.contentwarehouse_v1.types.GcsIngestPipeline):
+            Cloud Storage ingestion pipeline.
+
+            This field is a member of `oneof`_ ``pipeline``.
+        gcs_ingest_with_doc_ai_processors_pipeline (google.cloud.contentwarehouse_v1.types.GcsIngestWithDocAiProcessorsPipeline):
+            Use DocAI processors to process documents in
+            Cloud Storage and ingest them to Document
+            Warehouse.
+
+            This field is a member of `oneof`_ ``pipeline``.
+        export_cdw_pipeline (google.cloud.contentwarehouse_v1.types.ExportToCdwPipeline):
+            Export documents from Document Warehouse to
+            CDW for training purpose.
+
+            This field is a member of `oneof`_ ``pipeline``.
+        process_with_doc_ai_pipeline (google.cloud.contentwarehouse_v1.types.ProcessWithDocAiPipeline):
+            Use a DocAI processor to process documents in
+            Document Warehouse, and re-ingest the updated
+            results into Document Warehouse.
+
+            This field is a member of `oneof`_ ``pipeline``.
+        request_metadata (google.cloud.contentwarehouse_v1.types.RequestMetadata):
+            The meta information collected about the end
+            user, used to enforce access control for the
+            service.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    gcs_ingest_pipeline: pipelines.GcsIngestPipeline = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='pipeline',
+        message=pipelines.GcsIngestPipeline,
+    )
+    gcs_ingest_with_doc_ai_processors_pipeline: pipelines.GcsIngestWithDocAiProcessorsPipeline = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof='pipeline',
+        message=pipelines.GcsIngestWithDocAiProcessorsPipeline,
+    )
+    export_cdw_pipeline: pipelines.ExportToCdwPipeline = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof='pipeline',
+        message=pipelines.ExportToCdwPipeline,
+    )
+    process_with_doc_ai_pipeline: pipelines.ProcessWithDocAiPipeline = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof='pipeline',
+        message=pipelines.ProcessWithDocAiPipeline,
+    )
+    request_metadata: common.RequestMetadata = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=common.RequestMetadata,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))  # Public API: exactly the messages registered in the proto manifest.
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/pipelines.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/pipelines.py
new file mode 100644
index 000000000000..b7d0b476b8c1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/pipelines.py
@@ -0,0 +1,526 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import common
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+
+
+# proto-plus module registry for this file. Each entry in ``manifest`` must
+# match a message class declared below; the classes are registered under the
+# ``google.cloud.contentwarehouse.v1`` proto package.
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'RunPipelineResponse',
+        'RunPipelineMetadata',
+        'ProcessorInfo',
+        'IngestPipelineConfig',
+        'GcsIngestPipeline',
+        'GcsIngestWithDocAiProcessorsPipeline',
+        'ExportToCdwPipeline',
+        'ProcessWithDocAiPipeline',
+    },
+)
+
+
+class RunPipelineResponse(proto.Message):
+    r"""Response message of RunPipeline method.
+
+    This message declares no fields.
+    """
+
+
+class RunPipelineMetadata(proto.Message):
+    r"""Metadata message of RunPipeline method.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        total_file_count (int):
+            Number of files that were processed by the
+            pipeline.
+        failed_file_count (int):
+            Number of files that have failed at some
+            point in the pipeline.
+        user_info (google.cloud.contentwarehouse_v1.types.UserInfo):
+            User unique identification and groups
+            information.
+        gcs_ingest_pipeline_metadata (google.cloud.contentwarehouse_v1.types.RunPipelineMetadata.GcsIngestPipelineMetadata):
+            The pipeline metadata for GcsIngest pipeline.
+
+            This field is a member of `oneof`_ ``pipeline_metadata``.
+        export_to_cdw_pipeline_metadata (google.cloud.contentwarehouse_v1.types.RunPipelineMetadata.ExportToCdwPipelineMetadata):
+            The pipeline metadata for Export-to-CDW
+            pipeline.
+
+            This field is a member of `oneof`_ ``pipeline_metadata``.
+        process_with_doc_ai_pipeline_metadata (google.cloud.contentwarehouse_v1.types.RunPipelineMetadata.ProcessWithDocAiPipelineMetadata):
+            The pipeline metadata for Process-with-DocAi
+            pipeline.
+
+            This field is a member of `oneof`_ ``pipeline_metadata``.
+        individual_document_statuses (MutableSequence[google.cloud.contentwarehouse_v1.types.RunPipelineMetadata.IndividualDocumentStatus]):
+            The list of response details of each
+            document.
+    """
+
+    # Nested payloads, one message type per pipeline kind; exactly one of
+    # them is carried by the ``pipeline_metadata`` oneof below.
+    class GcsIngestPipelineMetadata(proto.Message):
+        r"""The metadata message for GcsIngest pipeline.
+
+        Attributes:
+            input_path (str):
+                The input Cloud Storage folder in this pipeline. Format:
+                ``gs://<bucket-name>/<folder-name>``.
+        """
+
+        input_path: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+
+    class ExportToCdwPipelineMetadata(proto.Message):
+        r"""The metadata message for Export-to-CDW pipeline.
+
+        Attributes:
+            documents (MutableSequence[str]):
+                The input list of all the resource names of
+                the documents to be exported.
+            doc_ai_dataset (str):
+                The output CDW dataset resource name.
+            output_path (str):
+                The output Cloud Storage folder in this
+                pipeline.
+        """
+
+        documents: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=1,
+        )
+        doc_ai_dataset: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        output_path: str = proto.Field(
+            proto.STRING,
+            number=3,
+        )
+
+    class ProcessWithDocAiPipelineMetadata(proto.Message):
+        r"""The metadata message for Process-with-DocAi pipeline.
+
+        Attributes:
+            documents (MutableSequence[str]):
+                The input list of all the resource names of
+                the documents to be processed.
+            processor_info (google.cloud.contentwarehouse_v1.types.ProcessorInfo):
+                The DocAI processor to process the documents
+                with.
+        """
+
+        documents: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=1,
+        )
+        processor_info: 'ProcessorInfo' = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message='ProcessorInfo',
+        )
+
+    class IndividualDocumentStatus(proto.Message):
+        r"""The status of processing a document.
+
+        Attributes:
+            document_id (str):
+                Document identifier of an existing document.
+            status (google.rpc.status_pb2.Status):
+                The status processing the document.
+        """
+
+        document_id: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        status: status_pb2.Status = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message=status_pb2.Status,
+        )
+
+    total_file_count: int = proto.Field(
+        proto.INT32,
+        number=1,
+    )
+    failed_file_count: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    user_info: common.UserInfo = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=common.UserInfo,
+    )
+    # oneof ``pipeline_metadata``: at most one of field numbers 4, 6 and 7
+    # may be set; setting one clears the others.
+    gcs_ingest_pipeline_metadata: GcsIngestPipelineMetadata = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof='pipeline_metadata',
+        message=GcsIngestPipelineMetadata,
+    )
+    export_to_cdw_pipeline_metadata: ExportToCdwPipelineMetadata = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        oneof='pipeline_metadata',
+        message=ExportToCdwPipelineMetadata,
+    )
+    process_with_doc_ai_pipeline_metadata: ProcessWithDocAiPipelineMetadata = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        oneof='pipeline_metadata',
+        message=ProcessWithDocAiPipelineMetadata,
+    )
+    # Field number 5 intentionally follows 7 in declaration order: proto
+    # field numbers are fixed by the .proto definition, not by position.
+    individual_document_statuses: MutableSequence[IndividualDocumentStatus] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=5,
+        message=IndividualDocumentStatus,
+    )
+
+
+class ProcessorInfo(proto.Message):
+    r"""The DocAI processor information.
+
+    Attributes:
+        processor_name (str):
+            The processor resource name. Format is
+            ``projects/{project}/locations/{location}/processors/{processor}``,
+            or
+            ``projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}``
+        document_type (str):
+            The processor will process the documents with
+            this document type.
+        schema_name (str):
+            The Document schema resource name. All documents processed
+            by this processor will use this schema. Format:
+            projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+    """
+
+    # All three fields are plain resource-name / identifier strings.
+    processor_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    document_type: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    schema_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class IngestPipelineConfig(proto.Message):
+    r"""The ingestion pipeline config.
+
+    Attributes:
+        document_acl_policy (google.iam.v1.policy_pb2.Policy):
+            The document level acl policy config. This refers to an
+            Identity and Access (IAM) policy, which specifies access
+            controls for all documents ingested by the pipeline. The
+            [role][google.iam.v1.Binding.role] and
+            [members][google.iam.v1.Binding.role] under the policy needs
+            to be specified.
+
+            The following roles are supported for document level acl
+            control:
+
+            -  roles/contentwarehouse.documentAdmin
+            -  roles/contentwarehouse.documentEditor
+            -  roles/contentwarehouse.documentViewer
+
+            The following members are supported for document level acl
+            control:
+
+            -  user:user-email@example.com
+            -  group:group-email@example.com Note that for documents
+               searched with LLM, only single level user or group acl
+               check is supported.
+        enable_document_text_extraction (bool):
+            The document text extraction enabled flag.
+            If the flag is set to true, DWH will perform
+            text extraction on the raw document.
+        folder (str):
+            Optional. The name of the folder to which all ingested
+            documents will be linked during ingestion process. Format is
+            ``projects/{project}/locations/{location}/documents/{folder_id}``
+        cloud_function (str):
+            The Cloud Function resource name. The Cloud Function needs
+            to live inside consumer project and is accessible to
+            Document AI Warehouse P4SA. Only Cloud Functions V2 is
+            supported. Cloud function execution should complete within 5
+            minutes or this file ingestion may fail due to timeout.
+            Format:
+            ``https://{region}-{project_id}.cloudfunctions.net/{cloud_function}``
+            The following keys are available the request json payload.
+
+            -  display_name
+            -  properties
+            -  plain_text
+            -  reference_id
+            -  document_schema_name
+            -  raw_document_path
+            -  raw_document_file_type
+
+            The following keys from the cloud function json response
+            payload will be ingested to the Document AI Warehouse as
+            part of Document proto content and/or related information.
+            The original values will be overridden if any key is present
+            in the response.
+
+            -  display_name
+            -  properties
+            -  plain_text
+            -  document_acl_policy
+            -  folder
+    """
+
+    # IAM policy stamped onto every document this pipeline ingests; see the
+    # class docstring for the supported roles and member kinds.
+    document_acl_policy: policy_pb2.Policy = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=policy_pb2.Policy,
+    )
+    enable_document_text_extraction: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    folder: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    cloud_function: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class GcsIngestPipeline(proto.Message):
+    r"""The configuration of the Cloud Storage Ingestion pipeline.
+
+    Attributes:
+        input_path (str):
+            The input Cloud Storage folder. All files under this folder
+            will be imported to Document Warehouse. Format:
+            ``gs://<bucket-name>/<folder-name>``.
+        schema_name (str):
+            The Document Warehouse schema resource name. All documents
+            processed by this pipeline will use this schema. Format:
+            projects/{project_number}/locations/{location}/documentSchemas/{document_schema_id}.
+        processor_type (str):
+            The Doc AI processor type name. Only used
+            when the format of ingested files is Doc AI
+            Document proto format.
+        skip_ingested_documents (bool):
+            The flag whether to skip ingested documents.
+            If it is set to true, documents in Cloud Storage
+            whose custom metadata contains the key "status"
+            with value "status=ingested" will be skipped
+            during ingestion.
+        pipeline_config (google.cloud.contentwarehouse_v1.types.IngestPipelineConfig):
+            Optional. The config for the Cloud Storage
+            Ingestion pipeline. It provides additional
+            customization options to run the pipeline and
+            can be skipped if it is not applicable.
+    """
+
+    input_path: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    schema_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    processor_type: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    skip_ingested_documents: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    pipeline_config: 'IngestPipelineConfig' = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message='IngestPipelineConfig',
+    )
+
+
+class GcsIngestWithDocAiProcessorsPipeline(proto.Message):
+    r"""The configuration of the Cloud Storage Ingestion with DocAI
+    Processors pipeline.
+
+    Attributes:
+        input_path (str):
+            The input Cloud Storage folder. All files under this folder
+            will be imported to Document Warehouse. Format:
+            ``gs://<bucket-name>/<folder-name>``.
+        split_classify_processor_info (google.cloud.contentwarehouse_v1.types.ProcessorInfo):
+            The split and classify processor information.
+            The split and classify result will be used to
+            find a matched extract processor.
+        extract_processor_infos (MutableSequence[google.cloud.contentwarehouse_v1.types.ProcessorInfo]):
+            The extract processors information.
+            One matched extract processor will be used to
+            process documents based on the classify
+            processor result. If no classify processor is
+            specified, the first extract processor will be
+            used.
+        processor_results_folder_path (str):
+            The Cloud Storage folder path used to store the raw results
+            from processors. Format:
+            ``gs://<bucket-name>/<folder-name>``.
+        skip_ingested_documents (bool):
+            The flag whether to skip ingested documents.
+            If it is set to true, documents in Cloud Storage
+            whose custom metadata contains the key "status"
+            with value "status=ingested" will be skipped
+            during ingestion.
+        pipeline_config (google.cloud.contentwarehouse_v1.types.IngestPipelineConfig):
+            Optional. The config for the Cloud Storage
+            Ingestion with DocAI Processors pipeline. It
+            provides additional customization options to run
+            the pipeline and can be skipped if it is not
+            applicable.
+    """
+
+    input_path: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    split_classify_processor_info: 'ProcessorInfo' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='ProcessorInfo',
+    )
+    extract_processor_infos: MutableSequence['ProcessorInfo'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message='ProcessorInfo',
+    )
+    processor_results_folder_path: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    skip_ingested_documents: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+    pipeline_config: 'IngestPipelineConfig' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message='IngestPipelineConfig',
+    )
+
+
+class ExportToCdwPipeline(proto.Message):
+    r"""The configuration of exporting documents from the Document
+    Warehouse to CDW pipeline.
+
+    Attributes:
+        documents (MutableSequence[str]):
+            The list of all the resource names of the documents to be
+            processed. Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+        export_folder_path (str):
+            The Cloud Storage folder path used to store the exported
+            documents before being sent to CDW. Format:
+            ``gs://<bucket-name>/<folder-name>``.
+        doc_ai_dataset (str):
+            Optional. The CDW dataset resource name. This
+            field is optional. If not set, the documents
+            will be exported to Cloud Storage only. Format:
+
+            projects/{project}/locations/{location}/processors/{processor}/dataset
+        training_split_ratio (float):
+            Ratio of training dataset split. When importing into
+            Document AI Workbench, documents will be automatically split
+            into training and test split category with the specified
+            ratio. This field is required if doc_ai_dataset is set.
+    """
+
+    documents: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    export_folder_path: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    doc_ai_dataset: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    # Required when doc_ai_dataset is set (see the field docstring above).
+    training_split_ratio: float = proto.Field(
+        proto.FLOAT,
+        number=4,
+    )
+
+
+class ProcessWithDocAiPipeline(proto.Message):
+    r"""The configuration of processing documents in Document
+    Warehouse with DocAi processors pipeline.
+
+    Attributes:
+        documents (MutableSequence[str]):
+            The list of all the resource names of the documents to be
+            processed. Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+        export_folder_path (str):
+            The Cloud Storage folder path used to store the exported
+            documents before being sent to CDW. Format:
+            ``gs://<bucket-name>/<folder-name>``.
+        processor_info (google.cloud.contentwarehouse_v1.types.ProcessorInfo):
+            The CDW processor information.
+        processor_results_folder_path (str):
+            The Cloud Storage folder path used to store the raw results
+            from processors. Format:
+            ``gs://<bucket-name>/<folder-name>``.
+    """
+
+    documents: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    # NOTE(review): the export_folder_path docstring mentions CDW, echoing
+    # ExportToCdwPipeline — possibly copied text; verify against the proto.
+    export_folder_path: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    processor_info: 'ProcessorInfo' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='ProcessorInfo',
+    )
+    processor_results_folder_path: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+# Public surface of this module: exactly the names registered in the
+# proto-plus manifest above.
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/rule_engine.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/rule_engine.py
new file mode 100644
index 000000000000..6590c3940c8b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/rule_engine.py
@@ -0,0 +1,584 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.iam.v1 import policy_pb2  # type: ignore
+
+
+# proto-plus module registry for this file. Each entry in ``manifest`` must
+# match a message class declared below; the classes are registered under the
+# ``google.cloud.contentwarehouse.v1`` proto package.
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'RuleSet',
+        'Rule',
+        'Action',
+        'AccessControlAction',
+        'DataValidationAction',
+        'DataUpdateAction',
+        'AddToFolderAction',
+        'RemoveFromFolderAction',
+        'PublishAction',
+        'DeleteDocumentAction',
+        'RuleEngineOutput',
+        'RuleEvaluatorOutput',
+        'InvalidRule',
+        'ActionExecutorOutput',
+        'RuleActionsPair',
+        'ActionOutput',
+    },
+)
+
+
+class RuleSet(proto.Message):
+    r"""Represents a set of rules from a single customer.
+
+    Attributes:
+        name (str):
+            The resource name of the rule set. Managed internally.
+            Format:
+            projects/{project_number}/locations/{location}/ruleSet/{rule_set_id}.
+
+            The name is ignored when creating a rule set.
+        description (str):
+            Short description of the rule-set.
+        source (str):
+            Source of the rules i.e., customer name.
+        rules (MutableSequence[google.cloud.contentwarehouse_v1.types.Rule]):
+            List of rules given by the customer.
+    """
+
+    # Declared first but carries field number 6: proto field numbers are
+    # fixed by the .proto definition, not by declaration order.
+    name: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    source: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    rules: MutableSequence['Rule'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message='Rule',
+    )
+
+
+class Rule(proto.Message):
+    r"""Represents the rule for a content warehouse trigger.
+
+    Attributes:
+        description (str):
+            Short description of the rule and its
+            context.
+        rule_id (str):
+            ID of the rule. It has to be unique across
+            all the examples. This is managed internally.
+        trigger_type (google.cloud.contentwarehouse_v1.types.Rule.TriggerType):
+            Identifies the trigger type for running the
+            policy.
+        condition (str):
+            Represents the conditional expression to be evaluated.
+            Expression should evaluate to a boolean result. When the
+            condition is true actions are executed. Example: user_role =
+            "hsbc_role_1" AND doc.salary > 20000
+        actions (MutableSequence[google.cloud.contentwarehouse_v1.types.Action]):
+            List of actions that are executed when the
+            rule is satisfied.
+    """
+    class TriggerType(proto.Enum):
+        r"""The trigger types for actions.
+
+        Values:
+            UNKNOWN (0):
+                Trigger for unknown action.
+            ON_CREATE (1):
+                Trigger for create document action.
+            ON_UPDATE (4):
+                Trigger for update document action.
+            ON_CREATE_LINK (7):
+                Trigger for create link action.
+            ON_DELETE_LINK (8):
+                Trigger for delete link action.
+        """
+        # Values 2, 3, 5 and 6 are absent here — presumably reserved or
+        # deprecated in the source proto; confirm before adding new values.
+        UNKNOWN = 0
+        ON_CREATE = 1
+        ON_UPDATE = 4
+        ON_CREATE_LINK = 7
+        ON_DELETE_LINK = 8
+
+    description: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    rule_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    trigger_type: TriggerType = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=TriggerType,
+    )
+    condition: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    actions: MutableSequence['Action'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=5,
+        message='Action',
+    )
+
+
+class Action(proto.Message):
+    r"""Represents the action triggered by Rule Engine when the rule
+    is true.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        action_id (str):
+            ID of the action. Managed internally.
+        access_control (google.cloud.contentwarehouse_v1.types.AccessControlAction):
+            Action triggering access control operations.
+
+            This field is a member of `oneof`_ ``action``.
+        data_validation (google.cloud.contentwarehouse_v1.types.DataValidationAction):
+            Action triggering data validation operations.
+
+            This field is a member of `oneof`_ ``action``.
+        data_update (google.cloud.contentwarehouse_v1.types.DataUpdateAction):
+            Action triggering data update operations.
+
+            This field is a member of `oneof`_ ``action``.
+        add_to_folder (google.cloud.contentwarehouse_v1.types.AddToFolderAction):
+            Action triggering create document link
+            operation.
+
+            This field is a member of `oneof`_ ``action``.
+        publish_to_pub_sub (google.cloud.contentwarehouse_v1.types.PublishAction):
+            Action publish to Pub/Sub operation.
+
+            This field is a member of `oneof`_ ``action``.
+        remove_from_folder_action (google.cloud.contentwarehouse_v1.types.RemoveFromFolderAction):
+            Action removing a document from a folder.
+
+            This field is a member of `oneof`_ ``action``.
+        delete_document_action (google.cloud.contentwarehouse_v1.types.DeleteDocumentAction):
+            Action deleting the document.
+
+            This field is a member of `oneof`_ ``action``.
+    """
+
+    action_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    # oneof ``action``: at most one member below may be set. Field numbers
+    # 7 and 8 are skipped — presumably reserved in the proto; verify before
+    # reusing them.
+    access_control: 'AccessControlAction' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='action',
+        message='AccessControlAction',
+    )
+    data_validation: 'DataValidationAction' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof='action',
+        message='DataValidationAction',
+    )
+    data_update: 'DataUpdateAction' = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof='action',
+        message='DataUpdateAction',
+    )
+    add_to_folder: 'AddToFolderAction' = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof='action',
+        message='AddToFolderAction',
+    )
+    publish_to_pub_sub: 'PublishAction' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        oneof='action',
+        message='PublishAction',
+    )
+    remove_from_folder_action: 'RemoveFromFolderAction' = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        oneof='action',
+        message='RemoveFromFolderAction',
+    )
+    delete_document_action: 'DeleteDocumentAction' = proto.Field(
+        proto.MESSAGE,
+        number=10,
+        oneof='action',
+        message='DeleteDocumentAction',
+    )
+
+
+class AccessControlAction(proto.Message):
+    r"""Represents the action responsible for access control list
+    management operations.
+
+    Attributes:
+        operation_type (google.cloud.contentwarehouse_v1.types.AccessControlAction.OperationType):
+            Identifies the type of operation.
+        policy (google.iam.v1.policy_pb2.Policy):
+            Represents the new policy from which bindings
+            are added, removed or replaced based on the type
+            of the operation. The policy is limited to a few
+            tens of KB.
+    """
+    class OperationType(proto.Enum):
+        r"""Type of ACL modification operation.
+
+        Values:
+            UNKNOWN (0):
+                The unknown operation type.
+            ADD_POLICY_BINDING (1):
+                Adds newly given policy bindings in the
+                existing bindings list.
+            REMOVE_POLICY_BINDING (2):
+                Removes newly given policy bindings from the
+                existing bindings list.
+            REPLACE_POLICY_BINDING (3):
+                Replaces existing policy bindings with the
+                given policy binding list
+        """
+        UNKNOWN = 0
+        ADD_POLICY_BINDING = 1
+        REMOVE_POLICY_BINDING = 2
+        REPLACE_POLICY_BINDING = 3
+
+    operation_type: OperationType = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=OperationType,
+    )
+    policy: policy_pb2.Policy = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=policy_pb2.Policy,
+    )
+
+
+class DataValidationAction(proto.Message):
+    r"""Represents the action responsible for data validation
+    operations.
+
+    Attributes:
+        conditions (MutableMapping[str, str]):
+            Map of (K, V) -> (field, string condition to
+            be evaluated on the field) E.g., ("age", "age >
+            18  && age < 60") entry triggers validation of
+            field age with the given condition. Map entries
+            will be ANDed during validation.
+    """
+
+    # map<field name, condition expression>; all entries are ANDed together
+    # (see the docstring above).
+    conditions: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=1,
+    )
+
+
+class DataUpdateAction(proto.Message):
+    r"""Represents the action responsible for properties update
+    operations.
+
+    Attributes:
+        entries (MutableMapping[str, str]):
+            Map of (K, V) -> (valid name of the field,
+            new value of the field) E.g., ("age", "60")
+            entry triggers update of field age with a value
+            of 60. If the field is not present then new
+            entry is added. During update action execution,
+            value strings will be casted to appropriate
+            types.
+    """
+
+    # map<field name, new value as string>; values are cast to the field's
+    # type at execution time (see the docstring above).
+    entries: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=1,
+    )
+
+
+class AddToFolderAction(proto.Message):
+    r"""Represents the action responsible for adding document under a
+    folder.
+
+    Attributes:
+        folders (MutableSequence[str]):
+            Names of the folder under which new document is to be added.
+            Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+    """
+
+    # Folder resource names; folders share the documents/{document_id}
+    # format per the docstring above.
+    folders: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+
+
+class RemoveFromFolderAction(proto.Message):
+    r"""Represents the action responsible for remove a document from
+    a specific folder.
+
+    Attributes:
+        condition (str):
+            Condition of the action to be executed.
+        folder (str):
+            Name of the folder under which new document is to be added.
+            Format:
+            projects/{project_number}/locations/{location}/documents/{document_id}.
+    """
+
+    condition: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    # NOTE(review): the ``folder`` docstring says "added", but this action
+    # removes a document from a folder — wording likely copied from
+    # AddToFolderAction; verify against the source proto.
+    folder: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class PublishAction(proto.Message):
+    r"""Represents the action responsible for publishing messages to
+    a Pub/Sub topic.
+
+    Attributes:
+        topic_id (str):
+            The topic id in the Pub/Sub service for which
+            messages will be published to.
+        messages (MutableSequence[str]):
+            Messages to be published.
+    """
+
+    # Target topic id plus the message payloads to publish to it.
+    topic_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    messages: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+class DeleteDocumentAction(proto.Message):
+    r"""Represents the action responsible for deleting the document.
+
+    Attributes:
+        enable_hard_delete (bool):
+            Boolean field to select between hard vs soft
+            delete options. Set 'true' for 'hard delete' and
+            'false' for 'soft delete'.
+    """
+
+    # True selects hard delete, False soft delete (see docstring above).
+    enable_hard_delete: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class RuleEngineOutput(proto.Message):
+    r"""Records the output of Rule Engine including rule evaluation
+    and actions result.
+
+    Attributes:
+        document_name (str):
+            Name of the document against which the rules
+            and actions were evaluated.
+        rule_evaluator_output (google.cloud.contentwarehouse_v1.types.RuleEvaluatorOutput):
+            Output from Rule Evaluator containing
+            matched, unmatched and invalid rules.
+        action_executor_output (google.cloud.contentwarehouse_v1.types.ActionExecutorOutput):
+            Output from Action Executor containing rule
+            and corresponding actions execution result.
+    """
+
+    document_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    rule_evaluator_output: 'RuleEvaluatorOutput' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='RuleEvaluatorOutput',
+    )
+    action_executor_output: 'ActionExecutorOutput' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='ActionExecutorOutput',
+    )
+
+
+class RuleEvaluatorOutput(proto.Message):
+    r"""Represents the output of the Rule Evaluator.
+
+    Attributes:
+        triggered_rules (MutableSequence[google.cloud.contentwarehouse_v1.types.Rule]):
+            List of rules fetched from database for the
+            given request trigger type.
+        matched_rules (MutableSequence[google.cloud.contentwarehouse_v1.types.Rule]):
+            A subset of triggered rules that are
+            evaluated true for a given request.
+        invalid_rules (MutableSequence[google.cloud.contentwarehouse_v1.types.InvalidRule]):
+            A subset of triggered rules that failed the
+            validation check(s) after parsing.
+    """
+
+    triggered_rules: MutableSequence['Rule'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='Rule',
+    )
+    matched_rules: MutableSequence['Rule'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message='Rule',
+    )
+    invalid_rules: MutableSequence['InvalidRule'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message='InvalidRule',
+    )
+
+
+class InvalidRule(proto.Message):
+    r"""A triggered rule that failed the validation check(s) after
+    parsing.
+
+    Attributes:
+        rule (google.cloud.contentwarehouse_v1.types.Rule):
+            Triggered rule.
+        error (str):
+            Validation error on a parsed expression.
+    """
+
+    rule: 'Rule' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='Rule',
+    )
+    error: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ActionExecutorOutput(proto.Message):
+    r"""Represents the output of the Action Executor.
+
+    Attributes:
+        rule_actions_pairs (MutableSequence[google.cloud.contentwarehouse_v1.types.RuleActionsPair]):
+            List of rule and corresponding actions
+            result.
+    """
+
+    rule_actions_pairs: MutableSequence['RuleActionsPair'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='RuleActionsPair',
+    )
+
+
+class RuleActionsPair(proto.Message):
+    r"""Represents a rule and outputs of associated actions.
+
+    Attributes:
+        rule (google.cloud.contentwarehouse_v1.types.Rule):
+            Represents the rule.
+        action_outputs (MutableSequence[google.cloud.contentwarehouse_v1.types.ActionOutput]):
+            Outputs of executing the actions associated
+            with the above rule.
+    """
+
+    rule: 'Rule' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='Rule',
+    )
+    action_outputs: MutableSequence['ActionOutput'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message='ActionOutput',
+    )
+
+
+class ActionOutput(proto.Message):
+    r"""Represents the result of executing an action.
+
+    Attributes:
+        action_id (str):
+            ID of the action.
+        action_state (google.cloud.contentwarehouse_v1.types.ActionOutput.State):
+            State of an action.
+        output_message (str):
+            Action execution output message.
+    """
+    class State(proto.Enum):
+        r"""Represents execution state of the action.
+
+        Values:
+            UNKNOWN (0):
+                The unknown state.
+            ACTION_SUCCEEDED (1):
+                State indicating action executed
+                successfully.
+            ACTION_FAILED (2):
+                State indicating action failed.
+            ACTION_TIMED_OUT (3):
+                State indicating action timed out.
+            ACTION_PENDING (4):
+                State indicating action is pending.
+        """
+        UNKNOWN = 0
+        ACTION_SUCCEEDED = 1
+        ACTION_FAILED = 2
+        ACTION_TIMED_OUT = 3
+        ACTION_PENDING = 4
+
+    action_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    action_state: State = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=State,
+    )
+    output_message: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/ruleset_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/ruleset_service.py
new file mode 100644
index 000000000000..d18bb8f366f8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/ruleset_service.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+    },
+)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/ruleset_service_request.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/ruleset_service_request.py
new file mode 100644
index 000000000000..7b793f34d5d8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/ruleset_service_request.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import rule_engine
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'CreateRuleSetRequest',
+        'GetRuleSetRequest',
+        'UpdateRuleSetRequest',
+        'DeleteRuleSetRequest',
+        'ListRuleSetsRequest',
+        'ListRuleSetsResponse',
+    },
+)
+
+
+class CreateRuleSetRequest(proto.Message):
+    r"""Request message for RuleSetService.CreateRuleSet.
+
+    Attributes:
+        parent (str):
+            Required. The parent name. Format:
+            projects/{project_number}/locations/{location}.
+        rule_set (google.cloud.contentwarehouse_v1.types.RuleSet):
+            Required. The rule set to create.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    rule_set: rule_engine.RuleSet = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=rule_engine.RuleSet,
+    )
+
+
+class GetRuleSetRequest(proto.Message):
+    r"""Request message for RuleSetService.GetRuleSet.
+
+    Attributes:
+        name (str):
+            Required. The name of the rule set to retrieve. Format:
+            projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UpdateRuleSetRequest(proto.Message):
+    r"""Request message for RuleSetService.UpdateRuleSet.
+
+    Attributes:
+        name (str):
+            Required. The name of the rule set to update. Format:
+            projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+        rule_set (google.cloud.contentwarehouse_v1.types.RuleSet):
+            Required. The rule set to update.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    rule_set: rule_engine.RuleSet = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=rule_engine.RuleSet,
+    )
+
+
+class DeleteRuleSetRequest(proto.Message):
+    r"""Request message for RuleSetService.DeleteRuleSet.
+
+    Attributes:
+        name (str):
+            Required. The name of the rule set to delete. Format:
+            projects/{project_number}/locations/{location}/ruleSets/{rule_set_id}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListRuleSetsRequest(proto.Message):
+    r"""Request message for RuleSetService.ListRuleSets.
+
+    Attributes:
+        parent (str):
+            Required. The parent, which owns this collection of
+            document. Format:
+            projects/{project_number}/locations/{location}.
+        page_size (int):
+            The maximum number of rule sets to return.
+            The service may return fewer than this value. If
+            unspecified, at most 50 rule sets will be
+            returned. The maximum value is 1000; values
+            above 1000 will be coerced to 1000.
+        page_token (str):
+            A page token, received from a previous ``ListRuleSets``
+            call. Provide this to retrieve the subsequent page.
+
+            When paginating, all other parameters provided to
+            ``ListRuleSets`` must match the call that provided the page
+            token.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListRuleSetsResponse(proto.Message):
+    r"""Response message for RuleSetService.ListRuleSets.
+
+    Attributes:
+        rule_sets (MutableSequence[google.cloud.contentwarehouse_v1.types.RuleSet]):
+            The rule sets from the specified parent.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    rule_sets: MutableSequence[rule_engine.RuleSet] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=rule_engine.RuleSet,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset.py
new file mode 100644
index 000000000000..c7dac6cd7dc6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'SynonymSet',
+    },
+)
+
+
+class SynonymSet(proto.Message):
+    r"""Represents a list of synonyms for a given context.
+    For example a context "sales" could contain:
+
+    Synonym 1: sale, invoice, bill, order
+    Synonym 2: money, credit, finance, payment
+    Synonym 3: shipping, freight, transport
+    Each SynonymSets should be disjoint
+
+    Attributes:
+        name (str):
+            The resource name of the SynonymSet This is mandatory for
+            google.api.resource. Format:
+            projects/{project_number}/locations/{location}/synonymSets/{context}.
+        context (str):
+            This is a freeform field. Example contexts
+            can be "sales," "engineering," "real estate,"
+            "accounting," etc. The context can be supplied
+            during search requests.
+        synonyms (MutableSequence[google.cloud.contentwarehouse_v1.types.SynonymSet.Synonym]):
+            List of Synonyms for the context.
+    """
+
+    class Synonym(proto.Message):
+        r"""Represents a list of words given by the customer
+        All these words are synonyms of each other.
+
+        Attributes:
+            words (MutableSequence[str]):
+                For example: sale, invoice, bill, order
+        """
+
+        words: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=1,
+        )
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    context: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    synonyms: MutableSequence[Synonym] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message=Synonym,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset_service.py
new file mode 100644
index 000000000000..d18bb8f366f8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset_service.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+    },
+)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset_service_request.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset_service_request.py
new file mode 100644
index 000000000000..34bd6ef80ceb
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/google/cloud/contentwarehouse_v1/types/synonymset_service_request.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.contentwarehouse_v1.types import synonymset
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.contentwarehouse.v1',
+    manifest={
+        'CreateSynonymSetRequest',
+        'GetSynonymSetRequest',
+        'ListSynonymSetsRequest',
+        'ListSynonymSetsResponse',
+        'UpdateSynonymSetRequest',
+        'DeleteSynonymSetRequest',
+    },
+)
+
+
+class CreateSynonymSetRequest(proto.Message):
+    r"""Request message for SynonymSetService.CreateSynonymSet.
+
+    Attributes:
+        parent (str):
+            Required. The parent name. Format:
+            projects/{project_number}/locations/{location}.
+        synonym_set (google.cloud.contentwarehouse_v1.types.SynonymSet):
+            Required. The synonymSet to be created for a
+            context
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    synonym_set: synonymset.SynonymSet = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=synonymset.SynonymSet,
+    )
+
+
+class GetSynonymSetRequest(proto.Message):
+    r"""Request message for SynonymSetService.GetSynonymSet.
+    Will return synonymSet for a certain context.
+
+    Attributes:
+        name (str):
+            Required. The name of the synonymSet to retrieve Format:
+            projects/{project_number}/locations/{location}/synonymSets/{context}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListSynonymSetsRequest(proto.Message):
+    r"""Request message for SynonymSetService.ListSynonymSets.
+    Will return all synonymSets belonging to the customer project.
+
+    Attributes:
+        parent (str):
+            Required. The parent name. Format:
+            projects/{project_number}/locations/{location}.
+        page_size (int):
+            The maximum number of synonymSets to return.
+            The service may return fewer than this value. If
+            unspecified, at most 50 synonymSets will be
+            returned. The maximum value is 1000; values
+            above 1000 will be coerced to 1000.
+        page_token (str):
+            A page token, received from a previous ``ListSynonymSets``
+            call. Provide this to retrieve the subsequent page.
+
+            When paginating, all other parameters provided to
+            ``ListSynonymSets`` must match the call that provided the
+            page token.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListSynonymSetsResponse(proto.Message):
+    r"""Response message for SynonymSetService.ListSynonymSets.
+
+    Attributes:
+        synonym_sets (MutableSequence[google.cloud.contentwarehouse_v1.types.SynonymSet]):
+            The synonymSets from the specified parent.
+        next_page_token (str):
+            A page token, received from a previous ``ListSynonymSets``
+            call. Provide this to retrieve the subsequent page.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    synonym_sets: MutableSequence[synonymset.SynonymSet] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=synonymset.SynonymSet,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class UpdateSynonymSetRequest(proto.Message):
+    r"""Request message for SynonymSetService.UpdateSynonymSet.
+    Removes the SynonymSet for the specified context and replaces it
+    with the SynonymSet in this request.
+
+    Attributes:
+        name (str):
+            Required. The name of the synonymSet to update Format:
+            projects/{project_number}/locations/{location}/synonymSets/{context}.
+        synonym_set (google.cloud.contentwarehouse_v1.types.SynonymSet):
+            Required. The synonymSet to be updated for
+            the customer
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    synonym_set: synonymset.SynonymSet = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=synonymset.SynonymSet,
+    )
+
+
+class DeleteSynonymSetRequest(proto.Message):
+    r"""Request message for SynonymSetService.DeleteSynonymSet.
+
+    Attributes:
+        name (str):
+            Required. The name of the synonymSet to delete Format:
+            projects/{project_number}/locations/{location}/synonymSets/{context}.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/mypy.ini b/owl-bot-staging/google-cloud-contentwarehouse/v1/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/noxfile.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/noxfile.py
new file mode 100644
index 000000000000..b45545ecab51
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/noxfile.py
@@ -0,0 +1,280 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import pathlib
+import re
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+ALL_PYTHON = [
+    "3.7",
+    "3.8",
+    "3.9",
+    "3.10",
+    "3.11",
+    "3.12",
+    "3.13",
+]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = 'google-cloud-contentwarehouse'
+
+BLACK_VERSION = "black==22.3.0"
+BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
+DEFAULT_PYTHON_VERSION = "3.13"
+
+nox.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds"
+    # exclude update_lower_bounds from default
+    "docs",
+    "blacken",
+    "lint",
+    "prerelease_deps",
+]
+
+@nox.session(python=ALL_PYTHON)
+@nox.parametrize(
+    "protobuf_implementation",
+    [ "python", "upb", "cpp" ],
+)
+def unit(session, protobuf_implementation):
+    """Run the unit test suite."""
+
+    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+        session.skip("cpp implementation is not supported in python 3.11+")
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
+    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
+
+    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
+    # The 'cpp' implementation requires Protobuf<4.
+    if protobuf_implementation == "cpp":
+        session.install("protobuf<4")
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/contentwarehouse_v1/',
+        '--cov=tests/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs)),
+        env={
+            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+        },
+    )
+
+@nox.session(python=ALL_PYTHON[-1])
+@nox.parametrize(
+    "protobuf_implementation",
+    [ "python", "upb", "cpp" ],
+)
+def prerelease_deps(session, protobuf_implementation):
+    """Run the unit test suite against pre-release versions of dependencies."""
+
+    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+        session.skip("cpp implementation is not supported in python 3.11+")
+
+    # Install test environment dependencies
+    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
+
+    # Install the package without dependencies
+    session.install('-e', '.', '--no-deps')
+
+    # We test the minimum dependency versions using the minimum Python
+    # version so the lowest python runtime that we test has a corresponding constraints
+    # file, located at `testing/constraints-<version>-.txt`,  which contains all of the
+    # dependencies and extras.
+    with open(
+        CURRENT_DIRECTORY
+        / "testing"
+        / f"constraints-{ALL_PYTHON[0]}.txt",
+        encoding="utf-8",
+    ) as constraints_file:
+        constraints_text = constraints_file.read()
+
+    # Ignore leading whitespace and comment lines.
+    constraints_deps = [
+        match.group(1)
+        for match in re.finditer(
+            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+        )
+    ]
+
+    session.install(*constraints_deps)
+
+    prerel_deps = [
+        "googleapis-common-protos",
+        "google-api-core",
+        "google-auth",
+        # Exclude grpcio==1.67.0rc1, which does not support python 3.13
+        "grpcio!=1.67.0rc1",
+        "grpcio-status",
+        "protobuf",
+        "proto-plus",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+
+    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run(
+        "python", "-c", "import proto; print(proto.__version__)"
+    )
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/contentwarehouse_v1/',
+        '--cov=tests/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs)),
+        env={
+            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+        },
+    )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+    """
+    session.install("coverage", "pytest-cov")
+    session.run("coverage", "report", "--show-missing", "--fail-under=100")
+
+    session.run("coverage", "erase")
+
+
+@nox.session(python=ALL_PYTHON)
+def mypy(session):
+    """Run the type checker."""
+    session.install(
+        'mypy',
+        'types-requests',
+        'types-protobuf'
+    )
+    session.install('.')
+    session.run(
+        'mypy',
+        '-p',
+        'google',
+    )
+
+
+@nox.session
+def update_lower_bounds(session):
+    """Update lower bounds in constraints.txt to match setup.py"""
+    session.install('google-cloud-testutils')
+    session.install('.')
+
+    session.run(
+        'lower-bound-checker',
+        'update',
+        '--package-name',
+        PACKAGE_NAME,
+        '--constraints-file',
+        str(LOWER_BOUND_CONSTRAINTS_FILE),
+    )
+
+
+@nox.session
+def check_lower_bounds(session):
+    """Check lower bounds in setup.py are reflected in constraints file"""
+    session.install('google-cloud-testutils')
+    session.install('.')
+
+    session.run(
+        'lower-bound-checker',
+        'check',
+        '--package-name',
+        PACKAGE_NAME,
+        '--constraints-file',
+        str(LOWER_BOUND_CONSTRAINTS_FILE),
+    )
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docs(session):
+    """Build the docs for this library."""
+
+    session.install("-e", ".")
+    session.install("sphinx==7.0.1", "alabaster", "recommonmark")
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-W",  # warnings as errors
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint(session):
+    """Run linters.
+
+    Returns a failure if the linters find linting errors or sufficiently
+    serious code quality issues.
+    """
+    session.install("flake8", BLACK_VERSION)
+    session.run(
+        "black",
+        "--check",
+        *BLACK_PATHS,
+    )
+    session.run("flake8", "google", "tests", "samples")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def blacken(session):
+    """Run black. Format code to uniform standard."""
+    session.install(BLACK_VERSION)
+    session.run(
+        "black",
+        *BLACK_PATHS,
+    )
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_create_document_link_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_create_document_link_async.py
new file mode 100644
index 000000000000..82c3ae52bb6d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_create_document_link_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDocumentLink
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentLinkService_CreateDocumentLink_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
async def sample_create_document_link():
    """Create a link between two documents and print the new link resource."""
    # Instantiate the asynchronous DocumentLinkService client.
    client = contentwarehouse_v1.DocumentLinkServiceAsyncClient()

    # Assemble a minimal request; only the required field is populated.
    request = contentwarehouse_v1.CreateDocumentLinkRequest(parent="parent_value")

    # Issue the RPC and show the server's response.
    response = await client.create_document_link(request=request)
    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentLinkService_CreateDocumentLink_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_create_document_link_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_create_document_link_sync.py
new file mode 100644
index 000000000000..c4b4c9c207a8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_create_document_link_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDocumentLink
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentLinkService_CreateDocumentLink_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
def sample_create_document_link():
    """Create a link between two documents and print the new link resource."""
    # Instantiate the synchronous DocumentLinkService client.
    client = contentwarehouse_v1.DocumentLinkServiceClient()

    # Assemble a minimal request; only the required field is populated.
    request = contentwarehouse_v1.CreateDocumentLinkRequest(parent="parent_value")

    # Issue the RPC and show the server's response.
    response = client.create_document_link(request=request)
    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentLinkService_CreateDocumentLink_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_delete_document_link_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_delete_document_link_async.py
new file mode 100644
index 000000000000..40a388b5f88f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_delete_document_link_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDocumentLink
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentLinkService_DeleteDocumentLink_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
async def sample_delete_document_link():
    """Delete an existing document link; the RPC returns no payload."""
    # Instantiate the asynchronous DocumentLinkService client.
    client = contentwarehouse_v1.DocumentLinkServiceAsyncClient()

    # Only the link's resource name is required.
    request = contentwarehouse_v1.DeleteDocumentLinkRequest(name="name_value")

    # Issue the RPC; there is no response body to inspect.
    await client.delete_document_link(request=request)
+
+
+# [END contentwarehouse_v1_generated_DocumentLinkService_DeleteDocumentLink_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_delete_document_link_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_delete_document_link_sync.py
new file mode 100644
index 000000000000..884cc7b58a52
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_delete_document_link_sync.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDocumentLink
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentLinkService_DeleteDocumentLink_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
def sample_delete_document_link():
    """Delete an existing document link; the RPC returns no payload."""
    # Instantiate the synchronous DocumentLinkService client.
    client = contentwarehouse_v1.DocumentLinkServiceClient()

    # Only the link's resource name is required.
    request = contentwarehouse_v1.DeleteDocumentLinkRequest(name="name_value")

    # Issue the RPC; there is no response body to inspect.
    client.delete_document_link(request=request)
+
+
+# [END contentwarehouse_v1_generated_DocumentLinkService_DeleteDocumentLink_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_sources_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_sources_async.py
new file mode 100644
index 000000000000..ff49eb5a7745
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_sources_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLinkedSources
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentLinkService_ListLinkedSources_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
async def sample_list_linked_sources():
    """List the source documents linked to a document, page by page."""
    # Create a client
    client = contentwarehouse_v1.DocumentLinkServiceAsyncClient()

    # Initialize request argument(s)
    request = contentwarehouse_v1.ListLinkedSourcesRequest(
        parent="parent_value",
    )

    # Make the request.
    # BUG FIX: the async client's method is a coroutine that resolves to an
    # async pager; it must be awaited before iterating. `async for` over the
    # bare coroutine (as the original sample did) raises TypeError because a
    # coroutine is not an async iterable.
    page_result = await client.list_linked_sources(request=request)

    # Handle the response; the pager fetches further pages as needed.
    async for response in page_result:
        print(response)
+
+# [END contentwarehouse_v1_generated_DocumentLinkService_ListLinkedSources_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_sources_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_sources_sync.py
new file mode 100644
index 000000000000..87aca2f8c48d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_sources_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLinkedSources
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentLinkService_ListLinkedSources_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
def sample_list_linked_sources():
    """Page through the source documents linked to a document."""
    # Instantiate the synchronous DocumentLinkService client.
    client = contentwarehouse_v1.DocumentLinkServiceClient()

    # Only the parent document's resource name is required.
    request = contentwarehouse_v1.ListLinkedSourcesRequest(parent="parent_value")

    # The returned pager transparently fetches additional pages while iterating.
    page_result = client.list_linked_sources(request=request)
    for response in page_result:
        print(response)
+
+# [END contentwarehouse_v1_generated_DocumentLinkService_ListLinkedSources_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_targets_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_targets_async.py
new file mode 100644
index 000000000000..61690fc1ee53
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_targets_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLinkedTargets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentLinkService_ListLinkedTargets_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
async def sample_list_linked_targets():
    """List the target documents a given document links to."""
    # Instantiate the asynchronous DocumentLinkService client.
    client = contentwarehouse_v1.DocumentLinkServiceAsyncClient()

    # Only the parent document's resource name is required.
    request = contentwarehouse_v1.ListLinkedTargetsRequest(parent="parent_value")

    # This call yields a single response message, printed whole.
    response = await client.list_linked_targets(request=request)
    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentLinkService_ListLinkedTargets_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_targets_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_targets_sync.py
new file mode 100644
index 000000000000..7c1857bd11ad
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_link_service_list_linked_targets_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLinkedTargets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentLinkService_ListLinkedTargets_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
def sample_list_linked_targets():
    """List the target documents a given document links to."""
    # Instantiate the synchronous DocumentLinkService client.
    client = contentwarehouse_v1.DocumentLinkServiceClient()

    # Only the parent document's resource name is required.
    request = contentwarehouse_v1.ListLinkedTargetsRequest(parent="parent_value")

    # This call yields a single response message, printed whole.
    response = client.list_linked_targets(request=request)
    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentLinkService_ListLinkedTargets_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_create_document_schema_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_create_document_schema_async.py
new file mode 100644
index 000000000000..1a01e1c8d072
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_create_document_schema_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDocumentSchema
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_CreateDocumentSchema_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
async def sample_create_document_schema():
    """Create a document schema and print the created resource."""
    # Instantiate the asynchronous DocumentSchemaService client.
    client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()

    # A schema needs at least a display name.
    document_schema = contentwarehouse_v1.DocumentSchema()
    document_schema.display_name = "display_name_value"

    # Bundle the required fields into the request.
    request = contentwarehouse_v1.CreateDocumentSchemaRequest(
        parent="parent_value",
        document_schema=document_schema,
    )

    # Call the API and show what came back.
    response = await client.create_document_schema(request=request)
    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_CreateDocumentSchema_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_create_document_schema_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_create_document_schema_sync.py
new file mode 100644
index 000000000000..63003624c552
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_create_document_schema_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDocumentSchema
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_CreateDocumentSchema_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
def sample_create_document_schema():
    """Create a document schema and print the created resource."""
    # Instantiate the synchronous DocumentSchemaService client.
    client = contentwarehouse_v1.DocumentSchemaServiceClient()

    # A schema needs at least a display name.
    document_schema = contentwarehouse_v1.DocumentSchema()
    document_schema.display_name = "display_name_value"

    # Bundle the required fields into the request.
    request = contentwarehouse_v1.CreateDocumentSchemaRequest(
        parent="parent_value",
        document_schema=document_schema,
    )

    # Call the API and show what came back.
    response = client.create_document_schema(request=request)
    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_CreateDocumentSchema_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_delete_document_schema_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_delete_document_schema_async.py
new file mode 100644
index 000000000000..7c2de5326409
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_delete_document_schema_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDocumentSchema
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_DeleteDocumentSchema_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
async def sample_delete_document_schema():
    """Delete a document schema; the RPC returns no payload."""
    # Instantiate the asynchronous DocumentSchemaService client.
    client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()

    # Only the schema's resource name is required.
    request = contentwarehouse_v1.DeleteDocumentSchemaRequest(name="name_value")

    # Issue the RPC; there is no response body to inspect.
    await client.delete_document_schema(request=request)
+
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_DeleteDocumentSchema_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_delete_document_schema_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_delete_document_schema_sync.py
new file mode 100644
index 000000000000..193554ccfd7e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_delete_document_schema_sync.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDocumentSchema
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_DeleteDocumentSchema_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
def sample_delete_document_schema():
    """Delete a document schema; the RPC returns no payload."""
    # Instantiate the synchronous DocumentSchemaService client.
    client = contentwarehouse_v1.DocumentSchemaServiceClient()

    # Only the schema's resource name is required.
    request = contentwarehouse_v1.DeleteDocumentSchemaRequest(name="name_value")

    # Issue the RPC; there is no response body to inspect.
    client.delete_document_schema(request=request)
+
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_DeleteDocumentSchema_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_get_document_schema_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_get_document_schema_async.py
new file mode 100644
index 000000000000..835a2bea43c8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_get_document_schema_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDocumentSchema
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_GetDocumentSchema_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_get_document_schema():
+    # Instantiate the asynchronous DocumentSchemaService client (default credentials).
+    client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()
+
+    # Build the request; replace "name_value" with the schema's full resource name.
+    request = contentwarehouse_v1.GetDocumentSchemaRequest(
+        name="name_value",
+    )
+
+    # Await the RPC; returns the requested DocumentSchema.
+    response = await client.get_document_schema(request=request)
+
+    # Print the fetched schema.
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_GetDocumentSchema_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_get_document_schema_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_get_document_schema_sync.py
new file mode 100644
index 000000000000..4a27e278ea60
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_get_document_schema_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDocumentSchema
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_GetDocumentSchema_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_get_document_schema():
+    # Instantiate the synchronous DocumentSchemaService client (default credentials).
+    client = contentwarehouse_v1.DocumentSchemaServiceClient()
+
+    # Build the request; replace "name_value" with the schema's full resource name.
+    request = contentwarehouse_v1.GetDocumentSchemaRequest(
+        name="name_value",
+    )
+
+    # Call the RPC; blocks until the DocumentSchema is returned.
+    response = client.get_document_schema(request=request)
+
+    # Print the fetched schema.
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_GetDocumentSchema_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_list_document_schemas_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_list_document_schemas_async.py
new file mode 100644
index 000000000000..667079e75aef
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_list_document_schemas_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDocumentSchemas
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_ListDocumentSchemas_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_list_document_schemas():
+    # Create a client
+    client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.ListDocumentSchemasRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_document_schemas(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_ListDocumentSchemas_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_list_document_schemas_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_list_document_schemas_sync.py
new file mode 100644
index 000000000000..5ade9dc307d8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_list_document_schemas_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDocumentSchemas
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_ListDocumentSchemas_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_list_document_schemas():
+    # Instantiate the synchronous DocumentSchemaService client (default credentials).
+    client = contentwarehouse_v1.DocumentSchemaServiceClient()
+
+    # Build the request; replace "parent_value" with the parent resource name.
+    request = contentwarehouse_v1.ListDocumentSchemasRequest(
+        parent="parent_value",
+    )
+
+    # Call the RPC; returns a pager over all matching schemas.
+    page_result = client.list_document_schemas(request=request)
+
+    # Iterate the pager; subsequent pages are fetched transparently.
+    for response in page_result:
+        print(response)
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_ListDocumentSchemas_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_update_document_schema_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_update_document_schema_async.py
new file mode 100644
index 000000000000..7558de5a4c29
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_update_document_schema_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDocumentSchema
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_UpdateDocumentSchema_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_update_document_schema():
+    # Instantiate the asynchronous DocumentSchemaService client (default credentials).
+    client = contentwarehouse_v1.DocumentSchemaServiceAsyncClient()
+
+    # Build the schema payload; the generator pre-fills display_name as a placeholder.
+    document_schema = contentwarehouse_v1.DocumentSchema()
+    document_schema.display_name = "display_name_value"
+
+    request = contentwarehouse_v1.UpdateDocumentSchemaRequest(
+        name="name_value",
+        document_schema=document_schema,
+    )
+
+    # Await the RPC; returns the update response.
+    response = await client.update_document_schema(request=request)
+
+    # Print the response.
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_UpdateDocumentSchema_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_update_document_schema_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_update_document_schema_sync.py
new file mode 100644
index 000000000000..c2eef1d12e55
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_schema_service_update_document_schema_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDocumentSchema
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentSchemaService_UpdateDocumentSchema_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_update_document_schema():
+    # Instantiate the synchronous DocumentSchemaService client (default credentials).
+    client = contentwarehouse_v1.DocumentSchemaServiceClient()
+
+    # Build the schema payload; the generator pre-fills display_name as a placeholder.
+    document_schema = contentwarehouse_v1.DocumentSchema()
+    document_schema.display_name = "display_name_value"
+
+    request = contentwarehouse_v1.UpdateDocumentSchemaRequest(
+        name="name_value",
+        document_schema=document_schema,
+    )
+
+    # Call the RPC; blocks until the update response is returned.
+    response = client.update_document_schema(request=request)
+
+    # Print the response.
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentSchemaService_UpdateDocumentSchema_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_create_document_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_create_document_async.py
new file mode 100644
index 000000000000..4738c6f9d3fd
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_create_document_async.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_CreateDocument_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_create_document():
+    # Instantiate the asynchronous DocumentService client (default credentials).
+    client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+    # Build the document payload with placeholder values (adjust for your data).
+    document = contentwarehouse_v1.Document()
+    document.plain_text = "plain_text_value"
+    document.raw_document_path = "raw_document_path_value"
+    document.display_name = "display_name_value"
+
+    request = contentwarehouse_v1.CreateDocumentRequest(
+        parent="parent_value",
+        document=document,
+    )
+
+    # Await the RPC; returns the create-document response.
+    response = await client.create_document(request=request)
+
+    # Print the response.
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_CreateDocument_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_create_document_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_create_document_sync.py
new file mode 100644
index 000000000000..a5a4482483e4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_create_document_sync.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_CreateDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_create_document():
+    # Instantiate the synchronous DocumentService client (default credentials).
+    client = contentwarehouse_v1.DocumentServiceClient()
+
+    # Build the document payload with placeholder values (adjust for your data).
+    document = contentwarehouse_v1.Document()
+    document.plain_text = "plain_text_value"
+    document.raw_document_path = "raw_document_path_value"
+    document.display_name = "display_name_value"
+
+    request = contentwarehouse_v1.CreateDocumentRequest(
+        parent="parent_value",
+        document=document,
+    )
+
+    # Call the RPC; blocks until the create-document response is returned.
+    response = client.create_document(request=request)
+
+    # Print the response.
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_CreateDocument_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_delete_document_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_delete_document_async.py
new file mode 100644
index 000000000000..d04e5c78056e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_delete_document_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_DeleteDocument_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_delete_document():
+    # Instantiate the asynchronous DocumentService client (default credentials).
+    client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+    # Build the request; replace "name_value" with the document's resource name.
+    request = contentwarehouse_v1.DeleteDocumentRequest(
+        name="name_value",
+    )
+
+    # Await the RPC; this sample does not use the response.
+    await client.delete_document(request=request)
+
+
+# [END contentwarehouse_v1_generated_DocumentService_DeleteDocument_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_delete_document_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_delete_document_sync.py
new file mode 100644
index 000000000000..f76b4c9c916f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_delete_document_sync.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_DeleteDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_delete_document():
+    # Instantiate the synchronous DocumentService client (default credentials).
+    client = contentwarehouse_v1.DocumentServiceClient()
+
+    # Build the request; replace "name_value" with the document's resource name.
+    request = contentwarehouse_v1.DeleteDocumentRequest(
+        name="name_value",
+    )
+
+    # Send the request; this sample does not use the response.
+    client.delete_document(request=request)
+
+
+# [END contentwarehouse_v1_generated_DocumentService_DeleteDocument_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_fetch_acl_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_fetch_acl_async.py
new file mode 100644
index 000000000000..90df080d027e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_fetch_acl_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for FetchAcl
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_FetchAcl_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_fetch_acl():
+    # Instantiate the asynchronous DocumentService client (default credentials).
+    client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+    # Build the request; replace "resource_value" with the target resource name.
+    request = contentwarehouse_v1.FetchAclRequest(
+        resource="resource_value",
+    )
+
+    # Await the RPC; returns the resource's access-control response.
+    response = await client.fetch_acl(request=request)
+
+    # Print the ACL response.
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_FetchAcl_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_fetch_acl_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_fetch_acl_sync.py
new file mode 100644
index 000000000000..5f5527dd93f8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_fetch_acl_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for FetchAcl
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_FetchAcl_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_fetch_acl():
+    # Instantiate the synchronous DocumentService client (default credentials).
+    client = contentwarehouse_v1.DocumentServiceClient()
+
+    # Build the request; replace "resource_value" with the target resource name.
+    request = contentwarehouse_v1.FetchAclRequest(
+        resource="resource_value",
+    )
+
+    # Call the RPC; blocks until the access-control response is returned.
+    response = client.fetch_acl(request=request)
+
+    # Print the ACL response.
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_FetchAcl_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_get_document_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_get_document_async.py
new file mode 100644
index 000000000000..0e164546a770
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_get_document_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_GetDocument_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_get_document():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.GetDocumentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.get_document(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_GetDocument_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_get_document_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_get_document_sync.py
new file mode 100644
index 000000000000..b87347588119
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_get_document_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_GetDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_get_document():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.GetDocumentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_document(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_GetDocument_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_async.py
new file mode 100644
index 000000000000..f50aa7912fe1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for LockDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_LockDocument_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_lock_document():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.LockDocumentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.lock_document(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_LockDocument_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_sync.py
new file mode 100644
index 000000000000..8c08641295a6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_lock_document_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for LockDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_LockDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_lock_document():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.LockDocumentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.lock_document(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_LockDocument_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_search_documents_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_search_documents_async.py
new file mode 100644
index 000000000000..ef7c7f599c69
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_search_documents_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchDocuments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_SearchDocuments_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_search_documents():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.SearchDocumentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.search_documents(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_SearchDocuments_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_search_documents_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_search_documents_sync.py
new file mode 100644
index 000000000000..d4aa22219213
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_search_documents_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchDocuments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_SearchDocuments_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_search_documents():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.SearchDocumentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.search_documents(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_SearchDocuments_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_set_acl_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_set_acl_async.py
new file mode 100644
index 000000000000..8a91967e700f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_set_acl_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SetAcl
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_SetAcl_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_set_acl():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.SetAclRequest(
+        resource="resource_value",
+    )
+
+    # Make the request
+    response = await client.set_acl(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_SetAcl_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_set_acl_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_set_acl_sync.py
new file mode 100644
index 000000000000..98ec8651a0ce
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_set_acl_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SetAcl
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_SetAcl_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_set_acl():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.SetAclRequest(
+        resource="resource_value",
+    )
+
+    # Make the request
+    response = client.set_acl(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_SetAcl_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_update_document_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_update_document_async.py
new file mode 100644
index 000000000000..54f355e5b5a6
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_update_document_async.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_UpdateDocument_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_update_document():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    document = contentwarehouse_v1.Document()
+    document.plain_text = "plain_text_value"
+    document.raw_document_path = "raw_document_path_value"
+    document.display_name = "display_name_value"
+
+    request = contentwarehouse_v1.UpdateDocumentRequest(
+        name="name_value",
+        document=document,
+    )
+
+    # Make the request
+    response = await client.update_document(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_UpdateDocument_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_update_document_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_update_document_sync.py
new file mode 100644
index 000000000000..5742cf5932fc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_document_service_update_document_sync.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_DocumentService_UpdateDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_update_document():
+    # Create a client
+    client = contentwarehouse_v1.DocumentServiceClient()
+
+    # Initialize request argument(s)
+    document = contentwarehouse_v1.Document()
+    document.plain_text = "plain_text_value"
+    document.raw_document_path = "raw_document_path_value"
+    document.display_name = "display_name_value"
+
+    request = contentwarehouse_v1.UpdateDocumentRequest(
+        name="name_value",
+        document=document,
+    )
+
+    # Make the request
+    response = client.update_document(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_DocumentService_UpdateDocument_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py
new file mode 100644
index 000000000000..a33707432e97
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RunPipeline
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_PipelineService_RunPipeline_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_run_pipeline():
+    # Create a client
+    client = contentwarehouse_v1.PipelineServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.RunPipelineRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.run_pipeline(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_PipelineService_RunPipeline_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py
new file mode 100644
index 000000000000..1388f4d5bc4e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RunPipeline
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_PipelineService_RunPipeline_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_run_pipeline():
+    # Create a client
+    client = contentwarehouse_v1.PipelineServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.RunPipelineRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.run_pipeline(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_PipelineService_RunPipeline_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_create_rule_set_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_create_rule_set_async.py
new file mode 100644
index 000000000000..5ca725145ebc
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_create_rule_set_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateRuleSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_CreateRuleSet_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_create_rule_set():
+    # Create a client
+    client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.CreateRuleSetRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    response = await client.create_rule_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_RuleSetService_CreateRuleSet_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_create_rule_set_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_create_rule_set_sync.py
new file mode 100644
index 000000000000..10e997e18d91
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_create_rule_set_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateRuleSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_CreateRuleSet_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_create_rule_set():
+    # Create a synchronous client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.CreateRuleSetRequest(
+        parent="parent_value",  # placeholder; use a real parent resource path
+    )
+
+    # Make the request (blocks until the RPC completes)
+    response = client.create_rule_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_RuleSetService_CreateRuleSet_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_delete_rule_set_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_delete_rule_set_async.py
new file mode 100644
index 000000000000..0821947cc01d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_delete_rule_set_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteRuleSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_DeleteRuleSet_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_delete_rule_set():
+    # Create an async client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.DeleteRuleSetRequest(
+        name="name_value",  # placeholder; use the full resource name to delete
+    )
+
+    # Make the request; the sample does not use the return value
+    await client.delete_rule_set(request=request)
+
+
+# [END contentwarehouse_v1_generated_RuleSetService_DeleteRuleSet_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_delete_rule_set_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_delete_rule_set_sync.py
new file mode 100644
index 000000000000..02e389af6c2e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_delete_rule_set_sync.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteRuleSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_DeleteRuleSet_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_delete_rule_set():
+    # Create a synchronous client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.DeleteRuleSetRequest(
+        name="name_value",  # placeholder; use the full resource name to delete
+    )
+
+    # Make the request; the sample does not use the return value
+    client.delete_rule_set(request=request)
+
+
+# [END contentwarehouse_v1_generated_RuleSetService_DeleteRuleSet_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_get_rule_set_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_get_rule_set_async.py
new file mode 100644
index 000000000000..8c8e6c0f7886
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_get_rule_set_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetRuleSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_GetRuleSet_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_get_rule_set():
+    # Create an async client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.GetRuleSetRequest(
+        name="name_value",  # placeholder; use the full resource name to fetch
+    )
+
+    # Make the request (await the RPC coroutine)
+    response = await client.get_rule_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_RuleSetService_GetRuleSet_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_get_rule_set_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_get_rule_set_sync.py
new file mode 100644
index 000000000000..2393eb5b41b4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_get_rule_set_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetRuleSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_GetRuleSet_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_get_rule_set():
+    # Create a synchronous client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.GetRuleSetRequest(
+        name="name_value",  # placeholder; use the full resource name to fetch
+    )
+
+    # Make the request (blocks until the RPC completes)
+    response = client.get_rule_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_RuleSetService_GetRuleSet_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_list_rule_sets_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_list_rule_sets_async.py
new file mode 100644
index 000000000000..fd971e10f8e1
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_list_rule_sets_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListRuleSets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_ListRuleSets_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_list_rule_sets():
+    # Create an async client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.ListRuleSetsRequest(
+        parent="parent_value",  # placeholder; use a real parent resource path
+    )
+
+    # Make the request (the async method is a coroutine, so it must be awaited
+    page_result = await client.list_rule_sets(request=request)
+
+    # Handle the response: iterate the async pager yielded by the awaited call
+    async for response in page_result:
+        print(response)
+
+# [END contentwarehouse_v1_generated_RuleSetService_ListRuleSets_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_list_rule_sets_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_list_rule_sets_sync.py
new file mode 100644
index 000000000000..997b2900559f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_list_rule_sets_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListRuleSets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_ListRuleSets_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_list_rule_sets():
+    # Create a synchronous client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.ListRuleSetsRequest(
+        parent="parent_value",  # placeholder; use a real parent resource path
+    )
+
+    # Make the request (the paged result is iterated below)
+    page_result = client.list_rule_sets(request=request)
+
+    # Handle the response, one item at a time
+    for response in page_result:
+        print(response)
+
+# [END contentwarehouse_v1_generated_RuleSetService_ListRuleSets_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_update_rule_set_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_update_rule_set_async.py
new file mode 100644
index 000000000000..a183501cbe61
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_update_rule_set_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateRuleSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_UpdateRuleSet_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_update_rule_set():
+    # Create an async client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.UpdateRuleSetRequest(
+        name="name_value",  # placeholder; use the full resource name to update
+    )
+
+    # Make the request (await the RPC coroutine)
+    response = await client.update_rule_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_RuleSetService_UpdateRuleSet_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_update_rule_set_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_update_rule_set_sync.py
new file mode 100644
index 000000000000..9f25c3226bbe
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_rule_set_service_update_rule_set_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateRuleSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_RuleSetService_UpdateRuleSet_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_update_rule_set():
+    # Create a synchronous client for the RuleSetService API
+    client = contentwarehouse_v1.RuleSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.UpdateRuleSetRequest(
+        name="name_value",  # placeholder; use the full resource name to update
+    )
+
+    # Make the request (blocks until the RPC completes)
+    response = client.update_rule_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_RuleSetService_UpdateRuleSet_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_async.py
new file mode 100644
index 000000000000..71851f648c84
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateSynonymSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_CreateSynonymSet_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_create_synonym_set():
+    # Create an async client for the SynonymSetService API
+    client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.CreateSynonymSetRequest(
+        parent="parent_value",  # placeholder; use a real parent resource path
+    )
+
+    # Make the request (await the RPC coroutine)
+    response = await client.create_synonym_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_SynonymSetService_CreateSynonymSet_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_sync.py
new file mode 100644
index 000000000000..8fee73d9fa81
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateSynonymSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_CreateSynonymSet_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_create_synonym_set():
+    # Create a synchronous client for the SynonymSetService API
+    client = contentwarehouse_v1.SynonymSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.CreateSynonymSetRequest(
+        parent="parent_value",  # placeholder; use a real parent resource path
+    )
+
+    # Make the request (blocks until the RPC completes)
+    response = client.create_synonym_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_SynonymSetService_CreateSynonymSet_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_async.py
new file mode 100644
index 000000000000..3760b92f07cb
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteSynonymSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_DeleteSynonymSet_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_delete_synonym_set():
+    # Create an async client for the SynonymSetService API
+    client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.DeleteSynonymSetRequest(
+        name="name_value",  # placeholder; use the full resource name to delete
+    )
+
+    # Make the request; the sample does not use the return value
+    await client.delete_synonym_set(request=request)
+
+
+# [END contentwarehouse_v1_generated_SynonymSetService_DeleteSynonymSet_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_sync.py
new file mode 100644
index 000000000000..8d83c644a233
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_sync.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteSynonymSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_DeleteSynonymSet_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_delete_synonym_set():
+    # Create a synchronous client for the SynonymSetService API
+    client = contentwarehouse_v1.SynonymSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.DeleteSynonymSetRequest(
+        name="name_value",  # placeholder; use the full resource name to delete
+    )
+
+    # Make the request; the sample does not use the return value
+    client.delete_synonym_set(request=request)
+
+
+# [END contentwarehouse_v1_generated_SynonymSetService_DeleteSynonymSet_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_async.py
new file mode 100644
index 000000000000..e9b3b2b76ff0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetSynonymSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_GetSynonymSet_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_get_synonym_set():
+    # Create a client
+    client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.GetSynonymSetRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.get_synonym_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_SynonymSetService_GetSynonymSet_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_sync.py
new file mode 100644
index 000000000000..e62627990b2f
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetSynonymSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_GetSynonymSet_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_get_synonym_set():
+    # Create a client
+    client = contentwarehouse_v1.SynonymSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.GetSynonymSetRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_synonym_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_SynonymSetService_GetSynonymSet_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_async.py
new file mode 100644
index 000000000000..53882d4c723e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSynonymSets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_ListSynonymSets_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_list_synonym_sets():
+    # Create a client
+    client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.ListSynonymSetsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_synonym_sets(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END contentwarehouse_v1_generated_SynonymSetService_ListSynonymSets_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_sync.py
new file mode 100644
index 000000000000..7579e8c230d2
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSynonymSets
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_ListSynonymSets_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_list_synonym_sets():
+    # Create a client
+    client = contentwarehouse_v1.SynonymSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.ListSynonymSetsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_synonym_sets(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END contentwarehouse_v1_generated_SynonymSetService_ListSynonymSets_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_async.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_async.py
new file mode 100644
index 000000000000..591a84c8d64e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateSynonymSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_UpdateSynonymSet_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+async def sample_update_synonym_set():
+    # Create a client
+    client = contentwarehouse_v1.SynonymSetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.UpdateSynonymSetRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.update_synonym_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_SynonymSetService_UpdateSynonymSet_async]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_sync.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_sync.py
new file mode 100644
index 000000000000..b6bfd1d4c497
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateSynonymSet
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-contentwarehouse
+
+
+# [START contentwarehouse_v1_generated_SynonymSetService_UpdateSynonymSet_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import contentwarehouse_v1
+
+
+def sample_update_synonym_set():
+    # Create a client
+    client = contentwarehouse_v1.SynonymSetServiceClient()
+
+    # Initialize request argument(s)
+    request = contentwarehouse_v1.UpdateSynonymSetRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.update_synonym_set(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END contentwarehouse_v1_generated_SynonymSetService_UpdateSynonymSet_sync]
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json
new file mode 100644
index 000000000000..4e798f93791e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json
@@ -0,0 +1,4573 @@
+{
+  "clientLibrary": {
+    "apis": [
+      {
+        "id": "google.cloud.contentwarehouse.v1",
+        "version": "v1"
+      }
+    ],
+    "language": "PYTHON",
+    "name": "google-cloud-contentwarehouse",
+    "version": "0.1.0"
+  },
+  "snippets": [
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceAsyncClient",
+          "shortName": "DocumentLinkServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceAsyncClient.create_document_link",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService.CreateDocumentLink",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService",
+            "shortName": "DocumentLinkService"
+          },
+          "shortName": "CreateDocumentLink"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateDocumentLinkRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "document_link",
+            "type": "google.cloud.contentwarehouse_v1.types.DocumentLink"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.DocumentLink",
+        "shortName": "create_document_link"
+      },
+      "description": "Sample for CreateDocumentLink",
+      "file": "contentwarehouse_v1_generated_document_link_service_create_document_link_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentLinkService_CreateDocumentLink_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_link_service_create_document_link_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceClient",
+          "shortName": "DocumentLinkServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceClient.create_document_link",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService.CreateDocumentLink",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService",
+            "shortName": "DocumentLinkService"
+          },
+          "shortName": "CreateDocumentLink"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateDocumentLinkRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "document_link",
+            "type": "google.cloud.contentwarehouse_v1.types.DocumentLink"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.DocumentLink",
+        "shortName": "create_document_link"
+      },
+      "description": "Sample for CreateDocumentLink",
+      "file": "contentwarehouse_v1_generated_document_link_service_create_document_link_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentLinkService_CreateDocumentLink_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_link_service_create_document_link_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceAsyncClient",
+          "shortName": "DocumentLinkServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceAsyncClient.delete_document_link",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService.DeleteDocumentLink",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService",
+            "shortName": "DocumentLinkService"
+          },
+          "shortName": "DeleteDocumentLink"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteDocumentLinkRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_document_link"
+      },
+      "description": "Sample for DeleteDocumentLink",
+      "file": "contentwarehouse_v1_generated_document_link_service_delete_document_link_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentLinkService_DeleteDocumentLink_async",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_link_service_delete_document_link_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceClient",
+          "shortName": "DocumentLinkServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceClient.delete_document_link",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService.DeleteDocumentLink",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService",
+            "shortName": "DocumentLinkService"
+          },
+          "shortName": "DeleteDocumentLink"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteDocumentLinkRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_document_link"
+      },
+      "description": "Sample for DeleteDocumentLink",
+      "file": "contentwarehouse_v1_generated_document_link_service_delete_document_link_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentLinkService_DeleteDocumentLink_sync",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_link_service_delete_document_link_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceAsyncClient",
+          "shortName": "DocumentLinkServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceAsyncClient.list_linked_sources",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService.ListLinkedSources",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService",
+            "shortName": "DocumentLinkService"
+          },
+          "shortName": "ListLinkedSources"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListLinkedSourcesRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.document_link_service.pagers.ListLinkedSourcesAsyncPager",
+        "shortName": "list_linked_sources"
+      },
+      "description": "Sample for ListLinkedSources",
+      "file": "contentwarehouse_v1_generated_document_link_service_list_linked_sources_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentLinkService_ListLinkedSources_async",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_link_service_list_linked_sources_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceClient",
+          "shortName": "DocumentLinkServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceClient.list_linked_sources",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService.ListLinkedSources",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService",
+            "shortName": "DocumentLinkService"
+          },
+          "shortName": "ListLinkedSources"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListLinkedSourcesRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.document_link_service.pagers.ListLinkedSourcesPager",
+        "shortName": "list_linked_sources"
+      },
+      "description": "Sample for ListLinkedSources",
+      "file": "contentwarehouse_v1_generated_document_link_service_list_linked_sources_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentLinkService_ListLinkedSources_sync",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_link_service_list_linked_sources_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceAsyncClient",
+          "shortName": "DocumentLinkServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceAsyncClient.list_linked_targets",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService.ListLinkedTargets",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService",
+            "shortName": "DocumentLinkService"
+          },
+          "shortName": "ListLinkedTargets"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListLinkedTargetsRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.ListLinkedTargetsResponse",
+        "shortName": "list_linked_targets"
+      },
+      "description": "Sample for ListLinkedTargets",
+      "file": "contentwarehouse_v1_generated_document_link_service_list_linked_targets_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentLinkService_ListLinkedTargets_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_link_service_list_linked_targets_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceClient",
+          "shortName": "DocumentLinkServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentLinkServiceClient.list_linked_targets",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService.ListLinkedTargets",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentLinkService",
+            "shortName": "DocumentLinkService"
+          },
+          "shortName": "ListLinkedTargets"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListLinkedTargetsRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.ListLinkedTargetsResponse",
+        "shortName": "list_linked_targets"
+      },
+      "description": "Sample for ListLinkedTargets",
+      "file": "contentwarehouse_v1_generated_document_link_service_list_linked_targets_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentLinkService_ListLinkedTargets_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_link_service_list_linked_targets_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient",
+          "shortName": "DocumentSchemaServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient.create_document_schema",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.CreateDocumentSchema",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "CreateDocumentSchema"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateDocumentSchemaRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "document_schema",
+            "type": "google.cloud.contentwarehouse_v1.types.DocumentSchema"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.DocumentSchema",
+        "shortName": "create_document_schema"
+      },
+      "description": "Sample for CreateDocumentSchema",
+      "file": "contentwarehouse_v1_generated_document_schema_service_create_document_schema_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_CreateDocumentSchema_async",
+      "segments": [
+        {
+          "end": 55,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 55,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 49,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 52,
+          "start": 50,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 56,
+          "start": 53,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_create_document_schema_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient",
+          "shortName": "DocumentSchemaServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient.create_document_schema",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.CreateDocumentSchema",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "CreateDocumentSchema"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateDocumentSchemaRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "document_schema",
+            "type": "google.cloud.contentwarehouse_v1.types.DocumentSchema"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.DocumentSchema",
+        "shortName": "create_document_schema"
+      },
+      "description": "Sample for CreateDocumentSchema",
+      "file": "contentwarehouse_v1_generated_document_schema_service_create_document_schema_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_CreateDocumentSchema_sync",
+      "segments": [
+        {
+          "end": 55,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 55,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 49,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 52,
+          "start": 50,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 56,
+          "start": 53,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_create_document_schema_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient",
+          "shortName": "DocumentSchemaServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient.delete_document_schema",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.DeleteDocumentSchema",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "DeleteDocumentSchema"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteDocumentSchemaRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_document_schema"
+      },
+      "description": "Sample for DeleteDocumentSchema",
+      "file": "contentwarehouse_v1_generated_document_schema_service_delete_document_schema_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_DeleteDocumentSchema_async",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_delete_document_schema_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient",
+          "shortName": "DocumentSchemaServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient.delete_document_schema",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.DeleteDocumentSchema",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "DeleteDocumentSchema"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteDocumentSchemaRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_document_schema"
+      },
+      "description": "Sample for DeleteDocumentSchema",
+      "file": "contentwarehouse_v1_generated_document_schema_service_delete_document_schema_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_DeleteDocumentSchema_sync",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_delete_document_schema_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient",
+          "shortName": "DocumentSchemaServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient.get_document_schema",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.GetDocumentSchema",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "GetDocumentSchema"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.GetDocumentSchemaRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.DocumentSchema",
+        "shortName": "get_document_schema"
+      },
+      "description": "Sample for GetDocumentSchema",
+      "file": "contentwarehouse_v1_generated_document_schema_service_get_document_schema_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_GetDocumentSchema_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_get_document_schema_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient",
+          "shortName": "DocumentSchemaServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient.get_document_schema",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.GetDocumentSchema",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "GetDocumentSchema"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.GetDocumentSchemaRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.DocumentSchema",
+        "shortName": "get_document_schema"
+      },
+      "description": "Sample for GetDocumentSchema",
+      "file": "contentwarehouse_v1_generated_document_schema_service_get_document_schema_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_GetDocumentSchema_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_get_document_schema_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient",
+          "shortName": "DocumentSchemaServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient.list_document_schemas",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.ListDocumentSchemas",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "ListDocumentSchemas"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListDocumentSchemasRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.document_schema_service.pagers.ListDocumentSchemasAsyncPager",
+        "shortName": "list_document_schemas"
+      },
+      "description": "Sample for ListDocumentSchemas",
+      "file": "contentwarehouse_v1_generated_document_schema_service_list_document_schemas_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_ListDocumentSchemas_async",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_list_document_schemas_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient",
+          "shortName": "DocumentSchemaServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient.list_document_schemas",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.ListDocumentSchemas",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "ListDocumentSchemas"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListDocumentSchemasRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.document_schema_service.pagers.ListDocumentSchemasPager",
+        "shortName": "list_document_schemas"
+      },
+      "description": "Sample for ListDocumentSchemas",
+      "file": "contentwarehouse_v1_generated_document_schema_service_list_document_schemas_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_ListDocumentSchemas_sync",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_list_document_schemas_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient",
+          "shortName": "DocumentSchemaServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceAsyncClient.update_document_schema",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.UpdateDocumentSchema",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "UpdateDocumentSchema"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.UpdateDocumentSchemaRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "document_schema",
+            "type": "google.cloud.contentwarehouse_v1.types.DocumentSchema"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.DocumentSchema",
+        "shortName": "update_document_schema"
+      },
+      "description": "Sample for UpdateDocumentSchema",
+      "file": "contentwarehouse_v1_generated_document_schema_service_update_document_schema_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_UpdateDocumentSchema_async",
+      "segments": [
+        {
+          "end": 55,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 55,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 49,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 52,
+          "start": 50,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 56,
+          "start": 53,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_update_document_schema_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient",
+          "shortName": "DocumentSchemaServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentSchemaServiceClient.update_document_schema",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService.UpdateDocumentSchema",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentSchemaService",
+            "shortName": "DocumentSchemaService"
+          },
+          "shortName": "UpdateDocumentSchema"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.UpdateDocumentSchemaRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "document_schema",
+            "type": "google.cloud.contentwarehouse_v1.types.DocumentSchema"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.DocumentSchema",
+        "shortName": "update_document_schema"
+      },
+      "description": "Sample for UpdateDocumentSchema",
+      "file": "contentwarehouse_v1_generated_document_schema_service_update_document_schema_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentSchemaService_UpdateDocumentSchema_sync",
+      "segments": [
+        {
+          "end": 55,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 55,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 49,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 52,
+          "start": 50,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 56,
+          "start": 53,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_schema_service_update_document_schema_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.create_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.CreateDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "CreateDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateDocumentRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "document",
+            "type": "google.cloud.contentwarehouse_v1.types.Document"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.CreateDocumentResponse",
+        "shortName": "create_document"
+      },
+      "description": "Sample for CreateDocument",
+      "file": "contentwarehouse_v1_generated_document_service_create_document_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_CreateDocument_async",
+      "segments": [
+        {
+          "end": 57,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 57,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 51,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 54,
+          "start": 52,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 58,
+          "start": 55,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_create_document_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.create_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.CreateDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "CreateDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateDocumentRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "document",
+            "type": "google.cloud.contentwarehouse_v1.types.Document"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.CreateDocumentResponse",
+        "shortName": "create_document"
+      },
+      "description": "Sample for CreateDocument",
+      "file": "contentwarehouse_v1_generated_document_service_create_document_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_CreateDocument_sync",
+      "segments": [
+        {
+          "end": 57,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 57,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 51,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 54,
+          "start": 52,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 58,
+          "start": 55,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_create_document_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.delete_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.DeleteDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "DeleteDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteDocumentRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_document"
+      },
+      "description": "Sample for DeleteDocument",
+      "file": "contentwarehouse_v1_generated_document_service_delete_document_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_DeleteDocument_async",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_delete_document_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.delete_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.DeleteDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "DeleteDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteDocumentRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_document"
+      },
+      "description": "Sample for DeleteDocument",
+      "file": "contentwarehouse_v1_generated_document_service_delete_document_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_DeleteDocument_sync",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_delete_document_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.fetch_acl",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.FetchAcl",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "FetchAcl"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.FetchAclRequest"
+          },
+          {
+            "name": "resource",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.FetchAclResponse",
+        "shortName": "fetch_acl"
+      },
+      "description": "Sample for FetchAcl",
+      "file": "contentwarehouse_v1_generated_document_service_fetch_acl_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_FetchAcl_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_fetch_acl_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.fetch_acl",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.FetchAcl",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "FetchAcl"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.FetchAclRequest"
+          },
+          {
+            "name": "resource",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.FetchAclResponse",
+        "shortName": "fetch_acl"
+      },
+      "description": "Sample for FetchAcl",
+      "file": "contentwarehouse_v1_generated_document_service_fetch_acl_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_FetchAcl_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_fetch_acl_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.get_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.GetDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "GetDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.GetDocumentRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.Document",
+        "shortName": "get_document"
+      },
+      "description": "Sample for GetDocument",
+      "file": "contentwarehouse_v1_generated_document_service_get_document_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_GetDocument_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_get_document_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.get_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.GetDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "GetDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.GetDocumentRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.Document",
+        "shortName": "get_document"
+      },
+      "description": "Sample for GetDocument",
+      "file": "contentwarehouse_v1_generated_document_service_get_document_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_GetDocument_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_get_document_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.lock_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.LockDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "LockDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.LockDocumentRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.Document",
+        "shortName": "lock_document"
+      },
+      "description": "Sample for LockDocument",
+      "file": "contentwarehouse_v1_generated_document_service_lock_document_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_LockDocument_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_lock_document_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.lock_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.LockDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "LockDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.LockDocumentRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.Document",
+        "shortName": "lock_document"
+      },
+      "description": "Sample for LockDocument",
+      "file": "contentwarehouse_v1_generated_document_service_lock_document_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_LockDocument_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_lock_document_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.search_documents",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.SearchDocuments",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "SearchDocuments"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.SearchDocumentsRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.document_service.pagers.SearchDocumentsAsyncPager",
+        "shortName": "search_documents"
+      },
+      "description": "Sample for SearchDocuments",
+      "file": "contentwarehouse_v1_generated_document_service_search_documents_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_SearchDocuments_async",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_search_documents_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.search_documents",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.SearchDocuments",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "SearchDocuments"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.SearchDocumentsRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.document_service.pagers.SearchDocumentsPager",
+        "shortName": "search_documents"
+      },
+      "description": "Sample for SearchDocuments",
+      "file": "contentwarehouse_v1_generated_document_service_search_documents_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_SearchDocuments_sync",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_search_documents_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.set_acl",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.SetAcl",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "SetAcl"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.SetAclRequest"
+          },
+          {
+            "name": "resource",
+            "type": "str"
+          },
+          {
+            "name": "policy",
+            "type": "google.iam.v1.policy_pb2.Policy"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.SetAclResponse",
+        "shortName": "set_acl"
+      },
+      "description": "Sample for SetAcl",
+      "file": "contentwarehouse_v1_generated_document_service_set_acl_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_SetAcl_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_set_acl_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.set_acl",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.SetAcl",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "SetAcl"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.SetAclRequest"
+          },
+          {
+            "name": "resource",
+            "type": "str"
+          },
+          {
+            "name": "policy",
+            "type": "google.iam.v1.policy_pb2.Policy"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.SetAclResponse",
+        "shortName": "set_acl"
+      },
+      "description": "Sample for SetAcl",
+      "file": "contentwarehouse_v1_generated_document_service_set_acl_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_SetAcl_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_set_acl_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceAsyncClient.update_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.UpdateDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "UpdateDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.UpdateDocumentRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "document",
+            "type": "google.cloud.contentwarehouse_v1.types.Document"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.UpdateDocumentResponse",
+        "shortName": "update_document"
+      },
+      "description": "Sample for UpdateDocument",
+      "file": "contentwarehouse_v1_generated_document_service_update_document_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_UpdateDocument_async",
+      "segments": [
+        {
+          "end": 57,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 57,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 51,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 54,
+          "start": 52,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 58,
+          "start": 55,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_update_document_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.DocumentServiceClient.update_document",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.DocumentService.UpdateDocument",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "UpdateDocument"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.UpdateDocumentRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "document",
+            "type": "google.cloud.contentwarehouse_v1.types.Document"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.UpdateDocumentResponse",
+        "shortName": "update_document"
+      },
+      "description": "Sample for UpdateDocument",
+      "file": "contentwarehouse_v1_generated_document_service_update_document_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_DocumentService_UpdateDocument_sync",
+      "segments": [
+        {
+          "end": 57,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 57,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 51,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 54,
+          "start": 52,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 58,
+          "start": 55,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_document_service_update_document_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.PipelineServiceAsyncClient",
+          "shortName": "PipelineServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.PipelineServiceAsyncClient.run_pipeline",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.PipelineService.RunPipeline",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.PipelineService",
+            "shortName": "PipelineService"
+          },
+          "shortName": "RunPipeline"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.RunPipelineRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.api_core.operation_async.AsyncOperation",
+        "shortName": "run_pipeline"
+      },
+      "description": "Sample for RunPipeline",
+      "file": "contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_PipelineService_RunPipeline_async",
+      "segments": [
+        {
+          "end": 55,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 55,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 52,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 56,
+          "start": 53,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_pipeline_service_run_pipeline_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.PipelineServiceClient",
+          "shortName": "PipelineServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.PipelineServiceClient.run_pipeline",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.PipelineService.RunPipeline",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.PipelineService",
+            "shortName": "PipelineService"
+          },
+          "shortName": "RunPipeline"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.RunPipelineRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.api_core.operation.Operation",
+        "shortName": "run_pipeline"
+      },
+      "description": "Sample for RunPipeline",
+      "file": "contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_PipelineService_RunPipeline_sync",
+      "segments": [
+        {
+          "end": 55,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 55,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 52,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 56,
+          "start": 53,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_pipeline_service_run_pipeline_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient",
+          "shortName": "RuleSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient.create_rule_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.CreateRuleSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "CreateRuleSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateRuleSetRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "rule_set",
+            "type": "google.cloud.contentwarehouse_v1.types.RuleSet"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.RuleSet",
+        "shortName": "create_rule_set"
+      },
+      "description": "Sample for CreateRuleSet",
+      "file": "contentwarehouse_v1_generated_rule_set_service_create_rule_set_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_CreateRuleSet_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_create_rule_set_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient",
+          "shortName": "RuleSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient.create_rule_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.CreateRuleSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "CreateRuleSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateRuleSetRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "rule_set",
+            "type": "google.cloud.contentwarehouse_v1.types.RuleSet"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.RuleSet",
+        "shortName": "create_rule_set"
+      },
+      "description": "Sample for CreateRuleSet",
+      "file": "contentwarehouse_v1_generated_rule_set_service_create_rule_set_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_CreateRuleSet_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_create_rule_set_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient",
+          "shortName": "RuleSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient.delete_rule_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.DeleteRuleSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "DeleteRuleSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteRuleSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_rule_set"
+      },
+      "description": "Sample for DeleteRuleSet",
+      "file": "contentwarehouse_v1_generated_rule_set_service_delete_rule_set_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_DeleteRuleSet_async",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_delete_rule_set_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient",
+          "shortName": "RuleSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient.delete_rule_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.DeleteRuleSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "DeleteRuleSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteRuleSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_rule_set"
+      },
+      "description": "Sample for DeleteRuleSet",
+      "file": "contentwarehouse_v1_generated_rule_set_service_delete_rule_set_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_DeleteRuleSet_sync",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_delete_rule_set_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient",
+          "shortName": "RuleSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient.get_rule_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.GetRuleSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "GetRuleSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.GetRuleSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.RuleSet",
+        "shortName": "get_rule_set"
+      },
+      "description": "Sample for GetRuleSet",
+      "file": "contentwarehouse_v1_generated_rule_set_service_get_rule_set_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_GetRuleSet_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_get_rule_set_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient",
+          "shortName": "RuleSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient.get_rule_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.GetRuleSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "GetRuleSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.GetRuleSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.RuleSet",
+        "shortName": "get_rule_set"
+      },
+      "description": "Sample for GetRuleSet",
+      "file": "contentwarehouse_v1_generated_rule_set_service_get_rule_set_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_GetRuleSet_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_get_rule_set_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient",
+          "shortName": "RuleSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient.list_rule_sets",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.ListRuleSets",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "ListRuleSets"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListRuleSetsRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.rule_set_service.pagers.ListRuleSetsAsyncPager",
+        "shortName": "list_rule_sets"
+      },
+      "description": "Sample for ListRuleSets",
+      "file": "contentwarehouse_v1_generated_rule_set_service_list_rule_sets_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_ListRuleSets_async",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_list_rule_sets_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient",
+          "shortName": "RuleSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient.list_rule_sets",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.ListRuleSets",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "ListRuleSets"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListRuleSetsRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.rule_set_service.pagers.ListRuleSetsPager",
+        "shortName": "list_rule_sets"
+      },
+      "description": "Sample for ListRuleSets",
+      "file": "contentwarehouse_v1_generated_rule_set_service_list_rule_sets_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_ListRuleSets_sync",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_list_rule_sets_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient",
+          "shortName": "RuleSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceAsyncClient.update_rule_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.UpdateRuleSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "UpdateRuleSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.UpdateRuleSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "rule_set",
+            "type": "google.cloud.contentwarehouse_v1.types.RuleSet"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.RuleSet",
+        "shortName": "update_rule_set"
+      },
+      "description": "Sample for UpdateRuleSet",
+      "file": "contentwarehouse_v1_generated_rule_set_service_update_rule_set_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_UpdateRuleSet_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_update_rule_set_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient",
+          "shortName": "RuleSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.RuleSetServiceClient.update_rule_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.RuleSetService.UpdateRuleSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.RuleSetService",
+            "shortName": "RuleSetService"
+          },
+          "shortName": "UpdateRuleSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.UpdateRuleSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "rule_set",
+            "type": "google.cloud.contentwarehouse_v1.types.RuleSet"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.RuleSet",
+        "shortName": "update_rule_set"
+      },
+      "description": "Sample for UpdateRuleSet",
+      "file": "contentwarehouse_v1_generated_rule_set_service_update_rule_set_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_RuleSetService_UpdateRuleSet_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_rule_set_service_update_rule_set_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient",
+          "shortName": "SynonymSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient.create_synonym_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.CreateSynonymSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "CreateSynonymSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateSynonymSetRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "synonym_set",
+            "type": "google.cloud.contentwarehouse_v1.types.SynonymSet"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.SynonymSet",
+        "shortName": "create_synonym_set"
+      },
+      "description": "Sample for CreateSynonymSet",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_CreateSynonymSet_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient",
+          "shortName": "SynonymSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient.create_synonym_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.CreateSynonymSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "CreateSynonymSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.CreateSynonymSetRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "synonym_set",
+            "type": "google.cloud.contentwarehouse_v1.types.SynonymSet"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.SynonymSet",
+        "shortName": "create_synonym_set"
+      },
+      "description": "Sample for CreateSynonymSet",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_CreateSynonymSet_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_create_synonym_set_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient",
+          "shortName": "SynonymSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient.delete_synonym_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.DeleteSynonymSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "DeleteSynonymSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteSynonymSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_synonym_set"
+      },
+      "description": "Sample for DeleteSynonymSet",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_DeleteSynonymSet_async",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient",
+          "shortName": "SynonymSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient.delete_synonym_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.DeleteSynonymSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "DeleteSynonymSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.DeleteSynonymSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "shortName": "delete_synonym_set"
+      },
+      "description": "Sample for DeleteSynonymSet",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_DeleteSynonymSet_sync",
+      "segments": [
+        {
+          "end": 49,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 49,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 50,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_delete_synonym_set_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient",
+          "shortName": "SynonymSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient.get_synonym_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.GetSynonymSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "GetSynonymSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.GetSynonymSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.SynonymSet",
+        "shortName": "get_synonym_set"
+      },
+      "description": "Sample for GetSynonymSet",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_GetSynonymSet_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient",
+          "shortName": "SynonymSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient.get_synonym_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.GetSynonymSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "GetSynonymSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.GetSynonymSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.SynonymSet",
+        "shortName": "get_synonym_set"
+      },
+      "description": "Sample for GetSynonymSet",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_GetSynonymSet_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_get_synonym_set_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient",
+          "shortName": "SynonymSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient.list_synonym_sets",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.ListSynonymSets",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "ListSynonymSets"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListSynonymSetsRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.synonym_set_service.pagers.ListSynonymSetsAsyncPager",
+        "shortName": "list_synonym_sets"
+      },
+      "description": "Sample for ListSynonymSets",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_ListSynonymSets_async",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient",
+          "shortName": "SynonymSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient.list_synonym_sets",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.ListSynonymSets",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "ListSynonymSets"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.ListSynonymSetsRequest"
+          },
+          {
+            "name": "parent",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.services.synonym_set_service.pagers.ListSynonymSetsPager",
+        "shortName": "list_synonym_sets"
+      },
+      "description": "Sample for ListSynonymSets",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_ListSynonymSets_sync",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_list_synonym_sets_sync.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient",
+          "shortName": "SynonymSetServiceAsyncClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceAsyncClient.update_synonym_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.UpdateSynonymSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "UpdateSynonymSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.UpdateSynonymSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "synonym_set",
+            "type": "google.cloud.contentwarehouse_v1.types.SynonymSet"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.SynonymSet",
+        "shortName": "update_synonym_set"
+      },
+      "description": "Sample for UpdateSynonymSet",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_UpdateSynonymSet_async",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient",
+          "shortName": "SynonymSetServiceClient"
+        },
+        "fullName": "google.cloud.contentwarehouse_v1.SynonymSetServiceClient.update_synonym_set",
+        "method": {
+          "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService.UpdateSynonymSet",
+          "service": {
+            "fullName": "google.cloud.contentwarehouse.v1.SynonymSetService",
+            "shortName": "SynonymSetService"
+          },
+          "shortName": "UpdateSynonymSet"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.contentwarehouse_v1.types.UpdateSynonymSetRequest"
+          },
+          {
+            "name": "name",
+            "type": "str"
+          },
+          {
+            "name": "synonym_set",
+            "type": "google.cloud.contentwarehouse_v1.types.SynonymSet"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.contentwarehouse_v1.types.SynonymSet",
+        "shortName": "update_synonym_set"
+      },
+      "description": "Sample for UpdateSynonymSet",
+      "file": "contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "contentwarehouse_v1_generated_SynonymSetService_UpdateSynonymSet_sync",
+      "segments": [
+        {
+          "end": 51,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 51,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 52,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "contentwarehouse_v1_generated_synonym_set_service_update_synonym_set_sync.py"
+    }
+  ]
+}
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/scripts/fixup_contentwarehouse_v1_keywords.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/scripts/fixup_contentwarehouse_v1_keywords.py
new file mode 100644
index 000000000000..893f5e98e3c0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/scripts/fixup_contentwarehouse_v1_keywords.py
@@ -0,0 +1,203 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """Split *iterator* into two lists, preserving the original order.

    Returns a pair ``(true_list, false_list)``: the items for which
    *predicate* is true, followed by the items for which it is false.
    """
    true_list: List[Any] = []
    false_list: List[Any] = []

    for item in iterator:
        (true_list if predicate(item) else false_list).append(item)

    return true_list, false_list
+
+
class contentwarehouseCallTransformer(cst.CSTTransformer):
    """libcst transformer that un-flattens contentwarehouse client calls.

    For each known API method, the flattened positional/keyword arguments
    are folded into a single ``request={...}`` dict argument, while the
    client-side control parameters (retry/timeout/metadata) are kept as
    ordinary keyword arguments.
    """
    # Keyword parameters that are client-plumbing, not request fields.
    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
    # Maps each API method name to its flattened parameter names, in the
    # order the generated client accepts them (order matters: positional
    # arguments are matched against this sequence).
    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
        'create_document': ('parent', 'document', 'request_metadata', 'policy', 'cloud_ai_document_option', 'create_mask', ),
        'create_document_link': ('parent', 'document_link', 'request_metadata', ),
        'create_document_schema': ('parent', 'document_schema', ),
        'create_rule_set': ('parent', 'rule_set', ),
        'create_synonym_set': ('parent', 'synonym_set', ),
        'delete_document': ('name', 'request_metadata', ),
        'delete_document_link': ('name', 'request_metadata', ),
        'delete_document_schema': ('name', ),
        'delete_rule_set': ('name', ),
        'delete_synonym_set': ('name', ),
        'fetch_acl': ('resource', 'request_metadata', 'project_owner', ),
        'get_document': ('name', 'request_metadata', ),
        'get_document_schema': ('name', ),
        'get_rule_set': ('name', ),
        'get_synonym_set': ('name', ),
        'list_document_schemas': ('parent', 'page_size', 'page_token', ),
        'list_linked_sources': ('parent', 'page_size', 'page_token', 'request_metadata', ),
        'list_linked_targets': ('parent', 'request_metadata', ),
        'list_rule_sets': ('parent', 'page_size', 'page_token', ),
        'list_synonym_sets': ('parent', 'page_size', 'page_token', ),
        'lock_document': ('name', 'collection_id', 'locking_user', ),
        'run_pipeline': ('name', 'gcs_ingest_pipeline', 'gcs_ingest_with_doc_ai_processors_pipeline', 'export_cdw_pipeline', 'process_with_doc_ai_pipeline', 'request_metadata', ),
        'search_documents': ('parent', 'request_metadata', 'document_query', 'offset', 'page_size', 'page_token', 'order_by', 'histogram_queries', 'require_total_size', 'total_result_size', 'qa_size_limit', ),
        'set_acl': ('resource', 'policy', 'request_metadata', 'project_owner', ),
        'update_document': ('name', 'document', 'request_metadata', 'cloud_ai_document_option', 'update_options', ),
        'update_document_schema': ('name', 'document_schema', ),
        'update_rule_set': ('name', 'rule_set', ),
        'update_synonym_set': ('name', 'synonym_set', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite one call site, returning the (possibly unchanged) node."""
        try:
            # Only `obj.method(...)` calls have `.func.attr`; bare calls raise
            # AttributeError and unknown method names raise KeyError.
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate control parameters (retry/timeout/metadata) from the
        # keyword args that belong inside the request dict.
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the flattened parameter list can only be
        # control parameters passed positionally; convert them to keywords.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        # Build the single `request={...}` argument from the remaining args.
        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )
+
+
def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=contentwarehouseCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    for root, _, filenames in os.walk(in_dir):
        for filename in filenames:
            # Only Python sources are rewritten; everything else is skipped.
            if os.path.splitext(filename)[1] != ".py":
                continue

            source_path = pathlib.Path(os.path.join(root, filename))

            # Parse the module and apply the call-site fixes.
            module = cst.parse_module(source_path.read_text())
            rewritten = module.visit(transformer)

            # Mirror the file's relative location under out_dir.
            dest_path = out_dir.joinpath(source_path.relative_to(in_dir))
            dest_path.parent.mkdir(parents=True, exist_ok=True)

            # Emit the updated source at the corresponding path.
            dest_path.write_text(rewritten.code)
+
+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the contentwarehouse client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    namespace = parser.parse_args()

    input_dir = pathlib.Path(namespace.input_dir)
    output_dir = pathlib.Path(namespace.output_dir)

    # Both paths must be existing directories; check input first so the
    # error messages come out in the same order as the arguments.
    for label, directory in (("input", input_dir), ("output", output_dir)):
        if not directory.is_dir():
            print(
                f"{label} directory '{directory}' does not exist or is not a directory",
                file=sys.stderr,
            )
            sys.exit(-1)

    # Refuse to write into a non-empty output directory.
    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/setup.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/setup.py
new file mode 100644
index 000000000000..1f992118a62d
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/setup.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import io
+import os
+import re
+
+import setuptools # type: ignore
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+name = 'google-cloud-contentwarehouse'
+
+
+description = "Google Cloud Contentwarehouse API client library"
+
+version = None
+
+with open(os.path.join(package_root, 'google/cloud/contentwarehouse/gapic_version.py')) as fp:
+    version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())
+    assert (len(version_candidates) == 1)
+    version = version_candidates[0]
+
+if version[0] == "0":
+    release_status = "Development Status :: 4 - Beta"
+else:
+    release_status = "Development Status :: 5 - Production/Stable"
+
+dependencies = [
+    "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+    # Exclude incompatible versions of `google-auth`
+    # See https://github.com/googleapis/google-cloud-python/issues/12364
+    "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
+    "proto-plus >= 1.22.3, <2.0.0dev",
+    "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'",
+    "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+    "google-cloud-documentai >= 2.0.0, <4.0.0dev",
+    "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev",
+]
+extras = {
+}
+url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse"
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, "README.rst")
+with io.open(readme_filename, encoding="utf-8") as readme_file:
+    readme = readme_file.read()
+
+packages = [
+    package
+    for package in setuptools.find_namespace_packages()
+    if package.startswith("google")
+]
+
+setuptools.setup(
+    name=name,
+    version=version,
+    description=description,
+    long_description=readme,
+    author="Google LLC",
+    author_email="googleapis-packages@google.com",
+    license="Apache 2.0",
+    url=url,
+    classifiers=[
+        release_status,
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
+        "Programming Language :: Python :: 3.13",
+        "Operating System :: OS Independent",
+        "Topic :: Internet",
+    ],
+    platforms="Posix; MacOS X; Windows",
+    packages=packages,
+    python_requires=">=3.7",
+    install_requires=dependencies,
+    extras_require=extras,
+    include_package_data=True,
+    zip_safe=False,
+)
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.10.txt
new file mode 100644
index 000000000000..0397b143465c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.10.txt
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
+google-cloud-documentai
+grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.11.txt
new file mode 100644
index 000000000000..0397b143465c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.11.txt
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
+google-cloud-documentai
+grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.12.txt
new file mode 100644
index 000000000000..0397b143465c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.12.txt
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
+google-cloud-documentai
+grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.13.txt
new file mode 100644
index 000000000000..0397b143465c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.13.txt
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
+google-cloud-documentai
+grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.7.txt
new file mode 100644
index 000000000000..e03d5841585a
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.7.txt
@@ -0,0 +1,12 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List all library dependencies and extras in this file.
+# Pin the version to the lower bound.
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have google-cloud-foo==1.14.0
+google-api-core==1.34.1
+google-auth==2.14.1
+proto-plus==1.22.3
+protobuf==3.20.2
+google-cloud-documentai==2.0.0
+grpc-google-iam-v1==0.12.4
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.8.txt
new file mode 100644
index 000000000000..0397b143465c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.8.txt
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
+google-cloud-documentai
+grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.9.txt
new file mode 100644
index 000000000000..0397b143465c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/testing/constraints-3.9.txt
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
+google-cloud-documentai
+grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/__init__.py
new file mode 100644
index 000000000000..7b3de3117f38
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/__init__.py
@@ -0,0 +1,16 @@
+
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/__init__.py
new file mode 100644
index 000000000000..7b3de3117f38
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/__init__.py
@@ -0,0 +1,16 @@
+
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/__init__.py
new file mode 100644
index 000000000000..7b3de3117f38
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/__init__.py
@@ -0,0 +1,16 @@
+
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/__init__.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/__init__.py
new file mode 100644
index 000000000000..7b3de3117f38
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/__init__.py
@@ -0,0 +1,16 @@
+
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py
new file mode 100644
index 000000000000..a9a00a593813
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py
@@ -0,0 +1,4565 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.contentwarehouse_v1.services.document_link_service import DocumentLinkServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.document_link_service import DocumentLinkServiceClient
+from google.cloud.contentwarehouse_v1.services.document_link_service import pagers
+from google.cloud.contentwarehouse_v1.services.document_link_service import transports
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import document
+from google.cloud.contentwarehouse_v1.types import document_link_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+async def mock_async_gen(data, chunk_size=1):
+    for i in range(0, len(data)):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert DocumentLinkServiceClient._get_default_mtls_endpoint(None) is None
+    assert DocumentLinkServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert DocumentLinkServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert DocumentLinkServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert DocumentLinkServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert DocumentLinkServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+def test__read_environment_variables():
+    assert DocumentLinkServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        assert DocumentLinkServiceClient._read_environment_variables() == (True, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        assert DocumentLinkServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            DocumentLinkServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        assert DocumentLinkServiceClient._read_environment_variables() == (False, "never", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        assert DocumentLinkServiceClient._read_environment_variables() == (False, "always", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+        assert DocumentLinkServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            DocumentLinkServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+        assert DocumentLinkServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
+def test__get_client_cert_source():
+    mock_provided_cert_source = mock.Mock()
+    mock_default_cert_source = mock.Mock()
+
+    assert DocumentLinkServiceClient._get_client_cert_source(None, False) is None
+    assert DocumentLinkServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
+    assert DocumentLinkServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
+
+    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
+            assert DocumentLinkServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
+            assert DocumentLinkServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
+
+@mock.patch.object(DocumentLinkServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentLinkServiceClient))
+@mock.patch.object(DocumentLinkServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentLinkServiceAsyncClient))
+def test__get_api_endpoint():
+    api_override = "foo.com"
+    mock_client_cert_source = mock.Mock()
+    default_universe = DocumentLinkServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = DocumentLinkServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = DocumentLinkServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    assert DocumentLinkServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
+    assert DocumentLinkServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DocumentLinkServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DocumentLinkServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
+    assert DocumentLinkServiceClient._get_api_endpoint(None, None, default_universe, "always") == DocumentLinkServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DocumentLinkServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DocumentLinkServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DocumentLinkServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
+    assert DocumentLinkServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
+
+    with pytest.raises(MutualTLSChannelError) as excinfo:
+        DocumentLinkServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
+    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
+def test__get_universe_domain():
+    client_universe_domain = "foo.com"
+    universe_domain_env = "bar.com"
+
+    assert DocumentLinkServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
+    assert DocumentLinkServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
+    assert DocumentLinkServiceClient._get_universe_domain(None, None) == DocumentLinkServiceClient._DEFAULT_UNIVERSE
+
+    with pytest.raises(ValueError) as excinfo:
+        DocumentLinkServiceClient._get_universe_domain("", None)
+    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceGrpcTransport, "grpc"),
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceRestTransport, "rest"),
+])
+def test__validate_universe_domain(client_class, transport_class, transport_name):
+    client = client_class(
+        transport=transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+    )
+    assert client._validate_universe_domain() == True
+
+    # Test the case when universe is already validated.
+    assert client._validate_universe_domain() == True
+
+    if transport_name == "grpc":
+        # Test the case where credentials are provided by the
+        # `local_channel_credentials`. The default universes in both match.
+        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+        client = client_class(transport=transport_class(channel=channel))
+        assert client._validate_universe_domain() == True
+
+        # Test the case where credentials do not exist: e.g. a transport is provided
+        # with no credentials. Validation should still succeed because there is no
+        # mismatch with non-existent credentials.
+        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+        transport=transport_class(channel=channel)
+        transport._credentials = None
+        client = client_class(transport=transport)
+        assert client._validate_universe_domain() == True
+
+    # TODO: This is needed to cater for older versions of google-auth
+    # Make this test unconditional once the minimum supported version of
+    # google-auth becomes 2.23.0 or higher.
+    google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]]
+    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
+        credentials = ga_credentials.AnonymousCredentials()
+        credentials._universe_domain = "foo.com"
+        # Test the case when there is a universe mismatch from the credentials.
+        client = client_class(
+            transport=transport_class(credentials=credentials)
+        )
+        with pytest.raises(ValueError) as excinfo:
+            client._validate_universe_domain()
+        assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+
+        # Test the case when there is a universe mismatch from the client.
+        #
+        # TODO: Make this test unconditional once the minimum supported version of
+        # google-api-core becomes 2.15.0 or higher.
+        api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]]
+        if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
+            client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),))
+            with pytest.raises(ValueError) as excinfo:
+                client._validate_universe_domain()
+            assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+
+    # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
+    with pytest.raises(ValueError):
+        client._compare_universes("foo.bar", None)
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (DocumentLinkServiceClient, "grpc"),
+    (DocumentLinkServiceAsyncClient, "grpc_asyncio"),
+    (DocumentLinkServiceClient, "rest"),
+])
+def test_document_link_service_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'contentwarehouse.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://contentwarehouse.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.DocumentLinkServiceGrpcTransport, "grpc"),
+    (transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (transports.DocumentLinkServiceRestTransport, "rest"),
+])
+def test_document_link_service_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (DocumentLinkServiceClient, "grpc"),
+    (DocumentLinkServiceAsyncClient, "grpc_asyncio"),
+    (DocumentLinkServiceClient, "rest"),
+])
+def test_document_link_service_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'contentwarehouse.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://contentwarehouse.googleapis.com'
+        )
+
+
+def test_document_link_service_client_get_transport_class():
+    transport = DocumentLinkServiceClient.get_transport_class()
+    available_transports = [
+        transports.DocumentLinkServiceGrpcTransport,
+        transports.DocumentLinkServiceRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = DocumentLinkServiceClient.get_transport_class("grpc")
+    assert transport == transports.DocumentLinkServiceGrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceGrpcTransport, "grpc"),
+    (DocumentLinkServiceAsyncClient, transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceRestTransport, "rest"),
+])
+@mock.patch.object(DocumentLinkServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentLinkServiceClient))
+@mock.patch.object(DocumentLinkServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentLinkServiceAsyncClient))
+def test_document_link_service_client_client_options(client_class, transport_class, transport_name):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(DocumentLinkServiceClient, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(DocumentLinkServiceClient, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceGrpcTransport, "grpc", "true"),
+    (DocumentLinkServiceAsyncClient, transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceGrpcTransport, "grpc", "false"),
+    (DocumentLinkServiceAsyncClient, transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceRestTransport, "rest", "true"),
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceRestTransport, "rest", "false"),
+])
+@mock.patch.object(DocumentLinkServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentLinkServiceClient))
+@mock.patch.object(DocumentLinkServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentLinkServiceAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_document_link_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    """Verify mTLS endpoint/cert autoswitch when GOOGLE_API_USE_MTLS_ENDPOINT="auto".
+
+    Three scenarios are exercised: an explicit ``client_cert_source`` option,
+    an ADC-provided default client cert, and no client cert at all. The mTLS
+    endpoint and cert must be passed to the transport only when
+    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a cert is available.
+    """
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
+                    if use_client_cert_env == "false":
+                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    # The default (non-mTLS) endpoint must be used and no cert passed through.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
+                patched.return_value = None
+                client = client_class(transport=transport_name)
+                patched.assert_called_once_with(
+                    credentials=None,
+                    credentials_file=None,
+                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                    scopes=None,
+                    client_cert_source_for_mtls=None,
+                    quota_project_id=None,
+                    client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    always_use_jwt_access=True,
+                    api_audience=None,
+                )
+
+
+@pytest.mark.parametrize("client_class", [
+    DocumentLinkServiceClient, DocumentLinkServiceAsyncClient
+])
+@mock.patch.object(DocumentLinkServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DocumentLinkServiceClient))
+@mock.patch.object(DocumentLinkServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DocumentLinkServiceAsyncClient))
+def test_document_link_service_client_get_mtls_endpoint_and_cert_source(client_class):
+    """Exercise ``get_mtls_endpoint_and_cert_source`` under every supported
+    combination of the GOOGLE_API_USE_CLIENT_CERTIFICATE and
+    GOOGLE_API_USE_MTLS_ENDPOINT environment variables, including the
+    error paths for unsupported values.
+    """
+    mock_client_cert_source = mock.Mock()
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source == mock_client_cert_source
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+    # The explicit api_endpoint still wins, but the cert source is dropped.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        mock_client_cert_source = mock.Mock()
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
+    # (GOOGLE_API_USE_MTLS_ENDPOINT is left unset here — "auto" is its default.)
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+            assert api_endpoint == client_class.DEFAULT_ENDPOINT
+            assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
+                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+                assert cert_source == mock_client_cert_source
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+@pytest.mark.parametrize("client_class", [
+    DocumentLinkServiceClient, DocumentLinkServiceAsyncClient
+])
+@mock.patch.object(DocumentLinkServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentLinkServiceClient))
+@mock.patch.object(DocumentLinkServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentLinkServiceAsyncClient))
+def test_document_link_service_client_client_api_endpoint(client_class):
+    """Verify how ``client.api_endpoint`` is resolved: explicit override,
+    mTLS environment switches, and the universe-domain template — while also
+    staying compatible with older ``ClientOptions`` that lack the
+    ``universe_domain`` attribute.
+    """
+    mock_client_cert_source = client_cert_source_callback
+    api_override = "foo.com"
+    default_universe = DocumentLinkServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = DocumentLinkServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = DocumentLinkServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
+    # use ClientOptions.api_endpoint as the api endpoint regardless.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
+            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
+            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+            assert client.api_endpoint == api_override
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
+    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+    # and ClientOptions.universe_domain="bar.com",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+    options = client_options.ClientOptions()
+    universe_exists = hasattr(options, "universe_domain")
+    if universe_exists:
+        options = client_options.ClientOptions(universe_domain=mock_universe)
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    else:
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
+    assert client.universe_domain == (mock_universe if universe_exists else default_universe)
+
+    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    # (delattr simulates an older google-api-core ClientOptions.)
+    options = client_options.ClientOptions()
+    if hasattr(options, "universe_domain"):
+        delattr(options, "universe_domain")
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceGrpcTransport, "grpc"),
+    (DocumentLinkServiceAsyncClient, transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceRestTransport, "rest"),
+])
+def test_document_link_service_client_client_options_scopes(client_class, transport_class, transport_name):
+    """Verify that scopes supplied via ClientOptions are forwarded verbatim
+    to the transport constructor.
+    """
+    # Check the case scopes are provided.
+    options = client_options.ClientOptions(
+        scopes=["1", "2"],
+    )
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=["1", "2"],
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceGrpcTransport, "grpc", grpc_helpers),
+    (DocumentLinkServiceAsyncClient, transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceRestTransport, "rest", None),
+])
+def test_document_link_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    """Verify that a credentials_file supplied via ClientOptions is forwarded
+    to the transport constructor (and credentials itself stays None).
+    """
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+def test_document_link_service_client_client_options_from_dict():
+    """Verify that client_options may be passed as a plain dict (not a
+    ClientOptions instance) and still reaches the gRPC transport.
+    """
+    with mock.patch('google.cloud.contentwarehouse_v1.services.document_link_service.transports.DocumentLinkServiceGrpcTransport.__init__') as grpc_transport:
+        grpc_transport.return_value = None
+        client = DocumentLinkServiceClient(
+            client_options={'api_endpoint': 'squid.clam.whelk'}
+        )
+        grpc_transport.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceGrpcTransport, "grpc", grpc_helpers),
+    (DocumentLinkServiceAsyncClient, transports.DocumentLinkServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+])
+def test_document_link_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    """Verify that credentials loaded from a credentials_file are the ones
+    actually used to create the gRPC channel (not ADC), and that the channel
+    is created with the expected host, scopes, and gRPC options.
+    """
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        # file_creds (not the ADC creds) must reach create_channel.
+        create_channel.assert_called_with(
+            "contentwarehouse.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+),
+            scopes=None,
+            default_host="contentwarehouse.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  document_link_service.ListLinkedTargetsRequest,
+  dict,
+])
+def test_list_linked_targets(request_type, transport: str = 'grpc'):
+    """Smoke-test list_linked_targets over gRPC: the stub is called once with
+    the request, and the mocked response is returned with the expected type
+    and fields.
+    """
+    client = DocumentLinkServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_linked_targets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_link_service.ListLinkedTargetsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_linked_targets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = document_link_service.ListLinkedTargetsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response.raw_page is response
+    assert isinstance(response, document_link_service.ListLinkedTargetsResponse)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_linked_targets_non_empty_request_with_auto_populated_field():
+    """Coverage failsafe: a non-empty request must pass through with its
+    explicitly-set fields intact while any AIP-4235 UUID4 fields are
+    auto-populated by the client.
+    """
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DocumentLinkServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = document_link_service.ListLinkedTargetsRequest(
+        parent='parent_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_linked_targets),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.list_linked_targets(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == document_link_service.ListLinkedTargetsRequest(
+            parent='parent_value',
+        )
+
+def test_list_linked_targets_use_cached_wrapped_rpc():
+    """Verify the sync client wraps the RPC once at construction and reuses
+    the cached wrapper on every subsequent call.
+    """
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentLinkServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_linked_targets in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_linked_targets] = mock_rpc
+        request = {}
+        client.list_linked_targets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_linked_targets(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_linked_targets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    """Async variant: the client wraps the RPC once at construction and
+    reuses the cached wrapper for every awaited call.
+    """
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentLinkServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_linked_targets in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_linked_targets] = mock_rpc
+
+        request = {}
+        await client.list_linked_targets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_linked_targets(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_linked_targets_async(transport: str = 'grpc_asyncio', request_type=document_link_service.ListLinkedTargetsRequest):
+    """Smoke-test list_linked_targets over grpc_asyncio: the stub is awaited
+    with the request and the mocked response is returned with the expected
+    type and fields.
+    """
+    client = DocumentLinkServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_linked_targets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document_link_service.ListLinkedTargetsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_linked_targets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_link_service.ListLinkedTargetsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, document_link_service.ListLinkedTargetsResponse)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_linked_targets_async_from_dict():
+    """Re-run the async smoke test with the request supplied as a dict."""
+    await test_list_linked_targets_async(request_type=dict)
+
+def test_list_linked_targets_field_headers():
+    """Verify the sync client sends the routing header
+    ``x-goog-request-params`` built from the request's ``parent`` field.
+    """
+    client = DocumentLinkServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_link_service.ListLinkedTargetsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_linked_targets),
+            '__call__') as call:
+        call.return_value = document_link_service.ListLinkedTargetsResponse()
+        client.list_linked_targets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_linked_targets_field_headers_async():
+    """Async variant: the routing header ``x-goog-request-params`` built from
+    the request's ``parent`` field must be sent.
+    """
+    client = DocumentLinkServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_link_service.ListLinkedTargetsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_linked_targets),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_link_service.ListLinkedTargetsResponse())
+        await client.list_linked_targets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
def test_list_linked_targets_flattened():
    """Verify flattened keyword arguments populate the request object."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_targets),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = document_link_service.ListLinkedTargetsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_linked_targets(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
+
+
def test_list_linked_targets_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_linked_targets(
            document_link_service.ListLinkedTargetsRequest(),
            parent='parent_value',
        )
+
@pytest.mark.asyncio
async def test_list_linked_targets_flattened_async():
    """Async variant: verify flattened keyword arguments populate the request."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_targets),
            '__call__') as call:
        # Designate an appropriate return value for the call. The async
        # transport yields a call object, so wrap the response in a
        # FakeUnaryUnaryCall. (The original generated code first assigned a
        # bare response that was immediately overwritten — dead code, removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_link_service.ListLinkedTargetsResponse())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_linked_targets(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_list_linked_targets_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_linked_targets(
            document_link_service.ListLinkedTargetsRequest(),
            parent='parent_value',
        )
+
+
@pytest.mark.parametrize("request_type", [
  document_link_service.ListLinkedSourcesRequest,
  dict,
])
def test_list_linked_sources(request_type, transport: str = 'grpc'):
    """Exercise list_linked_sources over gRPC with both proto and dict requests."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = document_link_service.ListLinkedSourcesResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_linked_sources(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = document_link_service.ListLinkedSourcesRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListLinkedSourcesPager)
    assert response.next_page_token == 'next_page_token_value'
+
+
def test_list_linked_sources_non_empty_request_with_auto_populated_field():
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = document_link_service.ListLinkedSourcesRequest(
        parent='parent_value',
        page_token='page_token_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.list_linked_sources(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The outgoing request must carry exactly the fields set above.
        assert args[0] == document_link_service.ListLinkedSourcesRequest(
            parent='parent_value',
            page_token='page_token_value',
        )
+
def test_list_linked_sources_use_cached_wrapped_rpc():
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.list_linked_sources in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.list_linked_sources] = mock_rpc
        request = {}
        client.list_linked_sources(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.list_linked_sources(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_list_linked_sources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.list_linked_sources in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.list_linked_sources] = mock_rpc

        request = {}
        await client.list_linked_sources(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.list_linked_sources(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_list_linked_sources_async(transport: str = 'grpc_asyncio', request_type=document_link_service.ListLinkedSourcesRequest):
    """Exercise list_linked_sources over the async gRPC transport."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # (PEP 8 fix: space added after the assignment operator.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_link_service.ListLinkedSourcesResponse(
            next_page_token='next_page_token_value',
        ))
        response = await client.list_linked_sources(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = document_link_service.ListLinkedSourcesRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListLinkedSourcesAsyncPager)
    assert response.next_page_token == 'next_page_token_value'
+
+
@pytest.mark.asyncio
async def test_list_linked_sources_async_from_dict():
    """Verify the async RPC also accepts a plain dict as the request."""
    await test_list_linked_sources_async(request_type=dict)
+
def test_list_linked_sources_field_headers():
    """Verify the request's parent field is sent as x-goog-request-params metadata."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_link_service.ListLinkedSourcesRequest()

    request.parent = 'parent_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        call.return_value = document_link_service.ListLinkedSourcesResponse()
        client.list_linked_sources(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in kw['metadata']
+
+
@pytest.mark.asyncio
async def test_list_linked_sources_field_headers_async():
    """Async variant: verify parent is sent as x-goog-request-params metadata."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_link_service.ListLinkedSourcesRequest()

    request.parent = 'parent_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_link_service.ListLinkedSourcesResponse())
        await client.list_linked_sources(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in kw['metadata']
+
+
def test_list_linked_sources_flattened():
    """Verify flattened keyword arguments populate the request object."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = document_link_service.ListLinkedSourcesResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_linked_sources(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
+
+
def test_list_linked_sources_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_linked_sources(
            document_link_service.ListLinkedSourcesRequest(),
            parent='parent_value',
        )
+
@pytest.mark.asyncio
async def test_list_linked_sources_flattened_async():
    """Async variant: verify flattened keyword arguments populate the request."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        # Designate an appropriate return value for the call. The async
        # transport yields a call object, so wrap the response in a
        # FakeUnaryUnaryCall. (The original generated code first assigned a
        # bare response that was immediately overwritten — dead code, removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_link_service.ListLinkedSourcesResponse())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_linked_sources(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_list_linked_sources_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_linked_sources(
            document_link_service.ListLinkedSourcesRequest(),
            parent='parent_value',
        )
+
+
def test_list_linked_sources_pager(transport_name: str = "grpc"):
    """Verify the sync pager iterates all pages and forwards retry/timeout/metadata."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                ],
                next_page_token='abc',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[],
                next_page_token='def',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                ],
                next_page_token='ghi',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                ],
            ),
            RuntimeError,
        )

        # The routing header is derived from the (empty) parent field.
        # (Simplified from the generated `expected_metadata = ()` followed by
        # `tuple(expected_metadata) + (...)`, which was a redundant two-step.)
        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        retry = retries.Retry()
        timeout = 5
        pager = client.list_linked_sources(request={}, retry=retry, timeout=timeout)

        # The pager must carry the call options through to each page fetch.
        assert pager._metadata == expected_metadata
        assert pager._retry == retry
        assert pager._timeout == timeout

        # 3 + 0 + 1 + 2 items across the four pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, document_link_service.DocumentLink)
                   for i in results)
def test_list_linked_sources_pages(transport_name: str = "grpc"):
    """Verify page-level iteration yields raw pages with the expected tokens."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                ],
                next_page_token='abc',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[],
                next_page_token='def',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                ],
                next_page_token='ghi',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_linked_sources(request={}).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
@pytest.mark.asyncio
async def test_list_linked_sources_async_pager():
    """Verify the async pager iterates all items across pages."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                ],
                next_page_token='abc',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[],
                next_page_token='def',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                ],
                next_page_token='ghi',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_linked_sources(request={},)
        assert async_pager.next_page_token == 'abc'
        responses = []
        async for response in async_pager: # pragma: no branch
            responses.append(response)

        # 3 + 0 + 1 + 2 items across the four pages.
        assert len(responses) == 6
        assert all(isinstance(i, document_link_service.DocumentLink)
                for i in responses)
+
+
@pytest.mark.asyncio
async def test_list_linked_sources_async_pages():
    """Verify async page-level iteration yields raw pages with the expected tokens."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                ],
                next_page_token='abc',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[],
                next_page_token='def',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                ],
                next_page_token='ghi',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
        async for page_ in ( # pragma: no branch
            await client.list_linked_sources(request={})
        ).pages:
            pages.append(page_)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
@pytest.mark.parametrize("request_type", [
  document_link_service.CreateDocumentLinkRequest,
  dict,
])
def test_create_document_link(request_type, transport: str = 'grpc'):
    """Exercise create_document_link over gRPC with both proto and dict requests."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document_link),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = document_link_service.DocumentLink(
            name='name_value',
            description='description_value',
            state=document_link_service.DocumentLink.State.ACTIVE,
        )
        response = client.create_document_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = document_link_service.CreateDocumentLinkRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, document_link_service.DocumentLink)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.state == document_link_service.DocumentLink.State.ACTIVE
+
+
def test_create_document_link_non_empty_request_with_auto_populated_field():
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = document_link_service.CreateDocumentLinkRequest(
        parent='parent_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document_link),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.create_document_link(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The outgoing request must carry exactly the fields set above.
        assert args[0] == document_link_service.CreateDocumentLinkRequest(
            parent='parent_value',
        )
+
def test_create_document_link_use_cached_wrapped_rpc():
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.create_document_link in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.create_document_link] = mock_rpc
        request = {}
        client.create_document_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.create_document_link(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_create_document_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.create_document_link in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.create_document_link] = mock_rpc

        request = {}
        await client.create_document_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.create_document_link(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_create_document_link_async(transport: str = 'grpc_asyncio', request_type=document_link_service.CreateDocumentLinkRequest):
    """Exercise create_document_link over the async gRPC transport."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document_link),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # (PEP 8 fix: space added after the assignment operator.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_link_service.DocumentLink(
            name='name_value',
            description='description_value',
            state=document_link_service.DocumentLink.State.ACTIVE,
        ))
        response = await client.create_document_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = document_link_service.CreateDocumentLinkRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, document_link_service.DocumentLink)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.state == document_link_service.DocumentLink.State.ACTIVE
+
+
@pytest.mark.asyncio
async def test_create_document_link_async_from_dict():
    """Verify the async RPC also accepts a plain dict as the request."""
    await test_create_document_link_async(request_type=dict)
+
def test_create_document_link_field_headers():
    """Verify the request's parent field is sent as x-goog-request-params metadata."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_link_service.CreateDocumentLinkRequest()

    request.parent = 'parent_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document_link),
            '__call__') as call:
        call.return_value = document_link_service.DocumentLink()
        client.create_document_link(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in kw['metadata']
+
+
@pytest.mark.asyncio
async def test_create_document_link_field_headers_async():
    """Async variant: routing headers derived from ``parent`` must be sent."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Populate the URI-routed field with a non-empty value.
    request = document_link_service.CreateDocumentLinkRequest()
    request.parent = 'parent_value'

    # Stub out the underlying gRPC call.
    with mock.patch.object(
            type(client.transport.create_document_link), '__call__') as grpc_call:
        grpc_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document_link_service.DocumentLink())
        await client.create_document_link(request)

        # The stub was invoked with the original request object.
        assert len(grpc_call.mock_calls)
        _, call_args, _ = grpc_call.mock_calls[0]
        assert call_args[0] == request

    # The request-params metadata entry must reflect the routed field.
    _, _, call_kwargs = grpc_call.mock_calls[0]
    expected_header = ('x-goog-request-params', 'parent=parent_value')
    assert expected_header in call_kwargs['metadata']
+
+
def test_create_document_link_flattened():
    """Flattened keyword arguments are folded into the request message."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Stub out the underlying gRPC call.
    with mock.patch.object(
            type(client.transport.create_document_link), '__call__') as grpc_call:
        grpc_call.return_value = document_link_service.DocumentLink()
        # Invoke with a truthy value for each flattened field.
        client.create_document_link(
            parent='parent_value',
            document_link=document_link_service.DocumentLink(name='name_value'),
        )

        # Each keyword must land on the matching field of the request proto.
        assert len(grpc_call.mock_calls) == 1
        _, call_args, _ = grpc_call.mock_calls[0]
        sent_request = call_args[0]
        assert sent_request.parent == 'parent_value'
        assert sent_request.document_link == document_link_service.DocumentLink(name='name_value')
+
+
def test_create_document_link_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Supplying both a request message and flattened kwargs is rejected.
    with pytest.raises(ValueError):
        client.create_document_link(
            document_link_service.CreateDocumentLinkRequest(),
            parent='parent_value',
            document_link=document_link_service.DocumentLink(name='name_value'),
        )
+
@pytest.mark.asyncio
async def test_create_document_link_flattened_async():
    """Async variant: flattened keyword arguments are folded into the request.

    Fix: removed a dead ``call.return_value = DocumentLink()`` assignment
    that was immediately overwritten by the FakeUnaryUnaryCall wrapper.
    """
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document_link),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_link_service.DocumentLink())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_document_link(
            parent='parent_value',
            document_link=document_link_service.DocumentLink(name='name_value'),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
        arg = args[0].document_link
        mock_val = document_link_service.DocumentLink(name='name_value')
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_create_document_link_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Supplying both a request message and flattened kwargs is rejected.
    with pytest.raises(ValueError):
        await client.create_document_link(
            document_link_service.CreateDocumentLinkRequest(),
            parent='parent_value',
            document_link=document_link_service.DocumentLink(name='name_value'),
        )
+
+
@pytest.mark.parametrize("request_type", [
  document_link_service.DeleteDocumentLinkRequest,
  dict,
])
def test_delete_document_link(request_type, transport: str = 'grpc'):
    """DeleteDocumentLink returns None and forwards the request unchanged."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Proto3 treats every field as optional and the API itself is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Stub out the underlying gRPC call.
    with mock.patch.object(
            type(client.transport.delete_document_link), '__call__') as grpc_call:
        grpc_call.return_value = None
        response = client.delete_document_link(request)

        # A single RPC was issued with an (empty) DeleteDocumentLinkRequest.
        assert len(grpc_call.mock_calls) == 1
        _, call_args, _ = grpc_call.mock_calls[0]
        assert call_args[0] == document_link_service.DeleteDocumentLinkRequest()

    # Delete is an Empty-response RPC, surfaced to callers as None.
    assert response is None
+
+
def test_delete_document_link_non_empty_request_with_auto_populated_field():
    """Non-UUID4 string fields survive request auto-population (AIP-4235)."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate every string field that is not an auto-populated UUID4 field
    # so we can verify they pass through untouched.
    request = document_link_service.DeleteDocumentLinkRequest(
        name='name_value',
    )

    # Stub out the underlying gRPC call.
    with mock.patch.object(
            type(client.transport.delete_document_link), '__call__') as grpc_call:
        # operation_request.operation in compute client(s) expect a string.
        grpc_call.return_value.name = "foo"
        client.delete_document_link(request=request)
        grpc_call.assert_called()
        _, call_args, _ = grpc_call.mock_calls[0]
        expected = document_link_service.DeleteDocumentLinkRequest(
            name='name_value',
        )
        assert call_args[0] == expected
+
def test_delete_document_link_use_cached_wrapped_rpc():
    """The client reuses the wrapped RPC cached by _prep_wrapped_messages."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Client construction wraps every method up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be in the transport's cache.
        assert client._transport.delete_document_link in client._transport._wrapped_methods

        # Swap the cached wrapper for a mock so calls can be counted.
        mock_rpc = mock.Mock()
        # operation_request.operation in compute client(s) expect a string.
        mock_rpc.return_value.name = "foo"
        client._transport._wrapped_methods[client._transport.delete_document_link] = mock_rpc

        request = {}
        client.delete_document_link(request)
        assert mock_rpc.call_count == 1

        # A second invocation reuses the cache instead of re-wrapping.
        client.delete_document_link(request)
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_delete_document_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async client reuses the wrapped RPC cached by _prep_wrapped_messages."""
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Client construction wraps every method up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be in the transport's cache.
        assert client._client._transport.delete_document_link in client._client._transport._wrapped_methods

        # Swap the cached wrapper for an async mock so calls can be counted.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.delete_document_link] = mock_rpc

        request = {}
        await client.delete_document_link(request)
        assert mock_rpc.call_count == 1

        # A second invocation reuses the cache instead of re-wrapping.
        await client.delete_document_link(request)
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_delete_document_link_async(transport: str = 'grpc_asyncio', request_type=document_link_service.DeleteDocumentLinkRequest):
    """Async DeleteDocumentLink returns None and forwards the request unchanged."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Proto3 treats every field as optional and the API itself is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Stub out the underlying gRPC call.
    with mock.patch.object(
            type(client.transport.delete_document_link), '__call__') as grpc_call:
        grpc_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_document_link(request)

        # The stub was invoked with an (empty) DeleteDocumentLinkRequest.
        assert len(grpc_call.mock_calls)
        _, call_args, _ = grpc_call.mock_calls[0]
        assert call_args[0] == document_link_service.DeleteDocumentLinkRequest()

    # Delete is an Empty-response RPC, surfaced to callers as None.
    assert response is None
+
+
@pytest.mark.asyncio
async def test_delete_document_link_async_from_dict():
    """Verify the async DeleteDocumentLink path also accepts a plain dict request."""
    await test_delete_document_link_async(request_type=dict)
+
def test_delete_document_link_field_headers():
    """Routing headers derived from ``name`` must be sent with the request."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a
    # field header, so give the routed field a non-empty value.
    request = document_link_service.DeleteDocumentLinkRequest()
    request.name = 'name_value'

    # Stub out the underlying gRPC call.
    with mock.patch.object(
            type(client.transport.delete_document_link), '__call__') as grpc_call:
        grpc_call.return_value = None
        client.delete_document_link(request)

        # Exactly one RPC was issued, carrying the original request object.
        assert len(grpc_call.mock_calls) == 1
        _, call_args, _ = grpc_call.mock_calls[0]
        assert call_args[0] == request

    # The request-params metadata entry must reflect the routed field.
    _, _, call_kwargs = grpc_call.mock_calls[0]
    expected_header = ('x-goog-request-params', 'name=name_value')
    assert expected_header in call_kwargs['metadata']
+
+
@pytest.mark.asyncio
async def test_delete_document_link_field_headers_async():
    """Async variant: routing headers derived from ``name`` must be sent."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Populate the URI-routed field with a non-empty value.
    request = document_link_service.DeleteDocumentLinkRequest()
    request.name = 'name_value'

    # Stub out the underlying gRPC call.
    with mock.patch.object(
            type(client.transport.delete_document_link), '__call__') as grpc_call:
        grpc_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_document_link(request)

        # The stub was invoked with the original request object.
        assert len(grpc_call.mock_calls)
        _, call_args, _ = grpc_call.mock_calls[0]
        assert call_args[0] == request

    # The request-params metadata entry must reflect the routed field.
    _, _, call_kwargs = grpc_call.mock_calls[0]
    expected_header = ('x-goog-request-params', 'name=name_value')
    assert expected_header in call_kwargs['metadata']
+
+
def test_delete_document_link_flattened():
    """Flattened ``name`` keyword is folded into the request message."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Stub out the underlying gRPC call.
    with mock.patch.object(
            type(client.transport.delete_document_link), '__call__') as grpc_call:
        grpc_call.return_value = None
        # Invoke with a truthy value for the flattened field.
        client.delete_document_link(
            name='name_value',
        )

        # The keyword must land on the matching field of the request proto.
        assert len(grpc_call.mock_calls) == 1
        _, call_args, _ = grpc_call.mock_calls[0]
        assert call_args[0].name == 'name_value'
+
+
def test_delete_document_link_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Supplying both a request message and flattened kwargs is rejected.
    with pytest.raises(ValueError):
        client.delete_document_link(
            document_link_service.DeleteDocumentLinkRequest(),
            name='name_value',
        )
+
@pytest.mark.asyncio
async def test_delete_document_link_flattened_async():
    """Async variant: flattened ``name`` keyword is folded into the request.

    Fix: removed a dead ``call.return_value = None`` assignment that was
    immediately overwritten by the FakeUnaryUnaryCall wrapper.
    """
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_document_link),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_document_link(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_delete_document_link_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Supplying both a request message and flattened kwargs is rejected.
    with pytest.raises(ValueError):
        await client.delete_document_link(
            document_link_service.DeleteDocumentLinkRequest(),
            name='name_value',
        )
+
+
def test_list_linked_targets_rest_use_cached_wrapped_rpc():
    """The REST client reuses the wrapped RPC cached by _prep_wrapped_messages."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Client construction wraps every method up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be in the transport's cache.
        assert client._transport.list_linked_targets in client._transport._wrapped_methods

        # Swap the cached wrapper for a mock so calls can be counted.
        mock_rpc = mock.Mock()
        # operation_request.operation in compute client(s) expect a string.
        mock_rpc.return_value.name = "foo"
        client._transport._wrapped_methods[client._transport.list_linked_targets] = mock_rpc

        request = {}
        client.list_linked_targets(request)
        assert mock_rpc.call_count == 1

        # A second invocation reuses the cache instead of re-wrapping.
        client.list_linked_targets(request)
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
+
def test_list_linked_targets_rest_required_fields(request_type=document_link_service.ListLinkedTargetsRequest):
    """Required fields (``parent``) must be validated and sent on the REST path."""
    transport_class = transports.DocumentLinkServiceRestTransport

    request_init = {}
    # Start with the required field present but at its proto3 default ("").
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_linked_targets._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_linked_targets._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = document_link_service.ListLinkedTargetsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = document_link_service.ListLinkedTargetsResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_linked_targets(request)

            # Only the standard '$alt' query parameter should be sent; the
            # request fields travel via the (mocked) transcoded result.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_list_linked_targets_rest_unset_required_fields():
    """``_get_unset_required_fields`` computes the missing-required set for ``parent``."""
    # Fix: instantiate AnonymousCredentials() — the class object itself was
    # being passed as the credentials argument.
    transport = transports.DocumentLinkServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.list_linked_targets._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("parent", )))
+
+
def test_list_linked_targets_rest_flattened():
    """Flattened ``parent`` keyword produces the expected REST URI."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # An empty response proto is sufficient — only routing is checked.
        return_value = document_link_service.ListLinkedTargetsResponse()

        # Arguments that satisfy an http rule for this method.
        sample_request = {'parent': 'projects/sample1/locations/sample2/documents/sample3'}

        # A truthy value for each flattened field.
        mock_args = dict(
            parent='parent_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj.
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = document_link_service.ListLinkedTargetsResponse.pb(return_value)
        response_value._content = json_format.MessageToJson(pb_return_value).encode('UTF-8')
        req.return_value = response_value

        client.list_linked_targets(**mock_args)

        # Exactly one HTTP request, aimed at the linkedTargets URI.
        assert len(req.mock_calls) == 1
        _, call_args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/documents/*}/linkedTargets" % client.transport._host, call_args[1])
+
+
def test_list_linked_targets_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both a request message and flattened kwargs is rejected.
    with pytest.raises(ValueError):
        client.list_linked_targets(
            document_link_service.ListLinkedTargetsRequest(),
            parent='parent_value',
        )
+
+
def test_list_linked_sources_rest_use_cached_wrapped_rpc():
    """The REST client reuses the wrapped RPC cached by _prep_wrapped_messages."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Client construction wraps every method up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be in the transport's cache.
        assert client._transport.list_linked_sources in client._transport._wrapped_methods

        # Swap the cached wrapper for a mock so calls can be counted.
        mock_rpc = mock.Mock()
        # operation_request.operation in compute client(s) expect a string.
        mock_rpc.return_value.name = "foo"
        client._transport._wrapped_methods[client._transport.list_linked_sources] = mock_rpc

        request = {}
        client.list_linked_sources(request)
        assert mock_rpc.call_count == 1

        # A second invocation reuses the cache instead of re-wrapping.
        client.list_linked_sources(request)
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
+
def test_list_linked_sources_rest_required_fields(request_type=document_link_service.ListLinkedSourcesRequest):
    """Required fields (``parent``) must be validated and sent on the REST path."""
    transport_class = transports.DocumentLinkServiceRestTransport

    request_init = {}
    # Start with the required field present but at its proto3 default ("").
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_linked_sources._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_linked_sources._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = document_link_service.ListLinkedSourcesResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = document_link_service.ListLinkedSourcesResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_linked_sources(request)

            # Only the standard '$alt' query parameter should be sent; the
            # request fields travel via the (mocked) transcoded result.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_list_linked_sources_rest_unset_required_fields():
    """``_get_unset_required_fields`` computes the missing-required set for ``parent``."""
    # Fix: instantiate AnonymousCredentials() — the class object itself was
    # being passed as the credentials argument.
    transport = transports.DocumentLinkServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.list_linked_sources._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("parent", )))
+
+
def test_list_linked_sources_rest_flattened():
    """Flattened ``parent`` keyword produces the expected REST URI."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # An empty response proto is sufficient — only routing is checked.
        return_value = document_link_service.ListLinkedSourcesResponse()

        # Arguments that satisfy an http rule for this method.
        sample_request = {'parent': 'projects/sample1/locations/sample2/documents/sample3'}

        # A truthy value for each flattened field.
        mock_args = dict(
            parent='parent_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj.
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = document_link_service.ListLinkedSourcesResponse.pb(return_value)
        response_value._content = json_format.MessageToJson(pb_return_value).encode('UTF-8')
        req.return_value = response_value

        client.list_linked_sources(**mock_args)

        # Exactly one HTTP request, aimed at the linkedSources URI.
        assert len(req.mock_calls) == 1
        _, call_args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/documents/*}/linkedSources" % client.transport._host, call_args[1])
+
+
def test_list_linked_sources_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both a request message and flattened kwargs is rejected.
    with pytest.raises(ValueError):
        client.list_linked_sources(
            document_link_service.ListLinkedSourcesRequest(),
            parent='parent_value',
        )
+
+
def test_list_linked_sources_rest_pager(transport: str = 'rest'):
    """The REST pager iterates items across all pages and exposes page tokens."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                    document_link_service.DocumentLink(),
                ],
                next_page_token='abc',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[],
                next_page_token='def',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                ],
                next_page_token='ghi',
            ),
            document_link_service.ListLinkedSourcesResponse(
                document_links=[
                    document_link_service.DocumentLink(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(document_link_service.ListLinkedSourcesResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'parent': 'projects/sample1/locations/sample2/documents/sample3'}

        pager = client.list_linked_sources(request=sample_request)

        # 3 + 0 + 1 + 2 items across the four mocked pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, document_link_service.DocumentLink)
                for i in results)

        # Each page surfaces the raw next_page_token of its response;
        # the final page's token is empty, terminating iteration.
        pages = list(client.list_linked_sources(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
+
def test_create_document_link_rest_use_cached_wrapped_rpc():
    """Wrapped RPCs are built once at construction and reused per call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Construction wraps every method up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be present in the wrapper cache.
        transport = client._transport
        assert transport.create_document_link in transport._wrapped_methods

        # Swap the cached wrapper for a mock so calls can be counted.
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.create_document_link] = mock_rpc

        request = {}
        client.create_document_link(request)
        assert mock_rpc.call_count == 1

        # A second call hits the cache: the stub fires again, but no new
        # wrapper is created.
        client.create_document_link(request)
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
+
def test_create_document_link_rest_required_fields(request_type=document_link_service.CreateDocumentLinkRequest):
    """Round-trip required-field handling for create_document_link over REST.

    Verifies that default-valued required fields are dropped from the JSON
    form of the request, reinstated by ``_get_unset_required_fields``, that
    non-default values survive untouched, and that only the expected system
    query parameters reach the wire.
    """
    transport_class = transports.DocumentLinkServiceRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document_link._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document_link._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = document_link_service.DocumentLink()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = document_link_service.DocumentLink.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.create_document_link(request)

            # Only the enum-encoding system parameter should reach the wire.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_create_document_link_rest_unset_required_fields():
    """The transport reports "parent" and "documentLink" as required fields.

    Fix: pass an *instance* of AnonymousCredentials — the original passed
    the class object (missing ``()``), inconsistent with every other
    transport construction in this module and fragile against credential
    validation in the transport constructor.
    """
    transport = transports.DocumentLinkServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.create_document_link._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("parent", "documentLink", )))
+
+
def test_create_document_link_rest_flattened():
    """Flattened args are folded into the request and hit the right path."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Fake a successful HTTP response carrying a serialized DocumentLink.
        expected = document_link_service.DocumentLink()
        payload = json_format.MessageToJson(
            document_link_service.DocumentLink.pb(expected))
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = payload.encode('UTF-8')
        req.return_value = fake_response

        # Truthy flattened fields, overridden with arguments that satisfy
        # the http rule for this method.
        call_kwargs = dict(
            parent='parent_value',
            document_link=document_link_service.DocumentLink(name='name_value'),
        )
        call_kwargs.update({'parent': 'projects/sample1/locations/sample2/documents/sample3'})

        client.create_document_link(**call_kwargs)

        # Exactly one HTTP call, aimed at the documentLinks collection URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/documents/*}/documentLinks" % client.transport._host, args[1])
+
+
def test_create_document_link_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = DocumentLinkServiceClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # The client rejects calls that pass both a request message and
    # individually flattened arguments.
    bad_request = document_link_service.CreateDocumentLinkRequest()
    with pytest.raises(ValueError):
        client.create_document_link(
            bad_request,
            parent='parent_value',
            document_link=document_link_service.DocumentLink(name='name_value'),
        )
+
+
def test_delete_document_link_rest_use_cached_wrapped_rpc():
    """Wrapped RPCs are built once at construction and reused per call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentLinkServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Construction wraps every method up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be present in the wrapper cache.
        transport = client._transport
        assert transport.delete_document_link in transport._wrapped_methods

        # Swap the cached wrapper for a mock so calls can be counted.
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.delete_document_link] = mock_rpc

        request = {}
        client.delete_document_link(request)
        assert mock_rpc.call_count == 1

        # A second call hits the cache: the stub fires again, but no new
        # wrapper is created.
        client.delete_document_link(request)
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
+
def test_delete_document_link_rest_required_fields(request_type=document_link_service.DeleteDocumentLinkRequest):
    """Round-trip required-field handling for delete_document_link over REST.

    Verifies that the default-valued required "name" field is dropped from
    the JSON form of the request, reinstated by
    ``_get_unset_required_fields``, that a non-default value survives
    untouched, and that only the expected system query parameters reach
    the wire.
    """
    transport_class = transports.DocumentLinkServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document_link._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document_link._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    # delete returns Empty, so there is no payload to fake.
    return_value = None
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = ''

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete_document_link(request)

            # Only the enum-encoding system parameter should reach the wire.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_delete_document_link_rest_unset_required_fields():
    """The transport reports "name" as a required, default-unset field.

    Fix: pass an *instance* of AnonymousCredentials — the original passed
    the class object (missing ``()``), inconsistent with every other
    transport construction in this module and fragile against credential
    validation in the transport constructor.
    """
    transport = transports.DocumentLinkServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.delete_document_link._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
def test_delete_document_link_rest_flattened():
    """Flattened args are folded into the request and hit the right path."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Fake an empty-bodied HTTP 200 (delete returns Empty).
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = ''.encode('UTF-8')
        req.return_value = fake_response

        # A truthy flattened field, overridden with an argument that
        # satisfies the http rule for this method.
        call_kwargs = dict(
            name='name_value',
        )
        call_kwargs.update({'name': 'projects/sample1/locations/sample2/documents/sample3/documentLinks/sample4'})

        client.delete_document_link(**call_kwargs)

        # Exactly one HTTP call, aimed at the :delete custom-verb URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{name=projects/*/locations/*/documents/*/documentLinks/*}:delete" % client.transport._host, args[1])
+
+
def test_delete_document_link_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = DocumentLinkServiceClient(
        transport=transport,
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # The client rejects calls that pass both a request message and
    # individually flattened arguments.
    bad_request = document_link_service.DeleteDocumentLinkRequest()
    with pytest.raises(ValueError):
        client.delete_document_link(bad_request, name='name_value')
+
+
def test_credentials_transport_error():
    """Mutually exclusive client options must each raise ValueError."""
    def make_transport():
        # A fresh transport instance for each conflicting combination.
        return transports.DocumentLinkServiceGrpcTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # credentials + transport instance
    with pytest.raises(ValueError):
        DocumentLinkServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=make_transport(),
        )

    # credentials_file + transport instance
    with pytest.raises(ValueError):
        DocumentLinkServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=make_transport(),
        )

    # api_key + transport instance
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        DocumentLinkServiceClient(
            client_options=options,
            transport=make_transport(),
        )

    # api_key + credentials
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        DocumentLinkServiceClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # scopes + transport instance
    with pytest.raises(ValueError):
        DocumentLinkServiceClient(
            client_options={"scopes": ["1", "2"]},
            transport=make_transport(),
        )
+
+
def test_transport_instance():
    """A client accepts and adopts a caller-supplied transport instance."""
    custom_transport = transports.DocumentLinkServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # The exact object handed in is the one the client exposes.
    assert DocumentLinkServiceClient(transport=custom_transport).transport is custom_transport
+
def test_transport_get_channel():
    """Both sync and async gRPC transports expose a usable channel."""
    for transport_cls in (
        transports.DocumentLinkServiceGrpcTransport,
        transports.DocumentLinkServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
+
@pytest.mark.parametrize("transport_class", [
    transports.DocumentLinkServiceGrpcTransport,
    transports.DocumentLinkServiceGrpcAsyncIOTransport,
    transports.DocumentLinkServiceRestTransport,
])
def test_transport_adc(transport_class):
    """Transports fall back to application-default credentials (ADC)."""
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        # Exactly one ADC lookup when no credentials are supplied.
        adc.assert_called_once()
+
def test_transport_kind_grpc():
    """The "grpc" transport class reports kind == "grpc"."""
    transport_cls = DocumentLinkServiceClient.get_transport_class("grpc")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "grpc"
+
+
def test_initialize_client_w_grpc():
    """The client constructs successfully over the gRPC transport."""
    assert DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc"
    ) is not None
+
+
# Coverage failsafe: a call with request == None and no flattened fields
# must still go out as the default request message.
def test_list_linked_targets_empty_call_grpc():
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Fake the underlying stub call and capture its arguments.
    with mock.patch.object(
            type(client.transport.list_linked_targets),
            '__call__') as stub:
        stub.return_value = document_link_service.ListLinkedTargetsResponse()
        client.list_linked_targets(request=None)

        stub.assert_called()
        _, args, _ = stub.mock_calls[0]
        # An empty call produces the default request message.
        assert args[0] == document_link_service.ListLinkedTargetsRequest()
+
+
# Coverage failsafe: a call with request == None and no flattened fields
# must still go out as the default request message.
def test_list_linked_sources_empty_call_grpc():
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Fake the underlying stub call and capture its arguments.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as stub:
        stub.return_value = document_link_service.ListLinkedSourcesResponse()
        client.list_linked_sources(request=None)

        stub.assert_called()
        _, args, _ = stub.mock_calls[0]
        # An empty call produces the default request message.
        assert args[0] == document_link_service.ListLinkedSourcesRequest()
+
+
# Coverage failsafe: a call with request == None and no flattened fields
# must still go out as the default request message.
def test_create_document_link_empty_call_grpc():
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Fake the underlying stub call and capture its arguments.
    with mock.patch.object(
            type(client.transport.create_document_link),
            '__call__') as stub:
        stub.return_value = document_link_service.DocumentLink()
        client.create_document_link(request=None)

        stub.assert_called()
        _, args, _ = stub.mock_calls[0]
        # An empty call produces the default request message.
        assert args[0] == document_link_service.CreateDocumentLinkRequest()
+
+
# Coverage failsafe: a call with request == None and no flattened fields
# must still go out as the default request message.
def test_delete_document_link_empty_call_grpc():
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Fake the underlying stub call and capture its arguments.
    with mock.patch.object(
            type(client.transport.delete_document_link),
            '__call__') as stub:
        stub.return_value = None
        client.delete_document_link(request=None)

        stub.assert_called()
        _, args, _ = stub.mock_calls[0]
        # An empty call produces the default request message.
        assert args[0] == document_link_service.DeleteDocumentLinkRequest()
+
+
def test_transport_kind_grpc_asyncio():
    """The async transport class reports kind == "grpc_asyncio"."""
    transport_cls = DocumentLinkServiceAsyncClient.get_transport_class("grpc_asyncio")
    transport = transport_cls(credentials=async_anonymous_credentials())
    assert transport.kind == "grpc_asyncio"
+
+
def test_initialize_client_w_grpc_asyncio():
    """The async client constructs successfully over grpc_asyncio."""
    assert DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio"
    ) is not None
+
+
# Coverage failsafe: a call with request == None and no flattened fields
# must still go out as the default request message (async variant).
@pytest.mark.asyncio
async def test_list_linked_targets_empty_call_grpc_asyncio():
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Fake the underlying stub call and capture its arguments.
    with mock.patch.object(
            type(client.transport.list_linked_targets),
            '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document_link_service.ListLinkedTargetsResponse(
                next_page_token='next_page_token_value',
            ))
        await client.list_linked_targets(request=None)

        stub.assert_called()
        _, args, _ = stub.mock_calls[0]
        # An empty call produces the default request message.
        assert args[0] == document_link_service.ListLinkedTargetsRequest()
+
+
# Coverage failsafe: a call with request == None and no flattened fields
# must still go out as the default request message (async variant).
@pytest.mark.asyncio
async def test_list_linked_sources_empty_call_grpc_asyncio():
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Fake the underlying stub call and capture its arguments.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document_link_service.ListLinkedSourcesResponse(
                next_page_token='next_page_token_value',
            ))
        await client.list_linked_sources(request=None)

        stub.assert_called()
        _, args, _ = stub.mock_calls[0]
        # An empty call produces the default request message.
        assert args[0] == document_link_service.ListLinkedSourcesRequest()
+
+
# Coverage failsafe: a call with request == None and no flattened fields
# must still go out as the default request message (async variant).
@pytest.mark.asyncio
async def test_create_document_link_empty_call_grpc_asyncio():
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Fake the underlying stub call and capture its arguments.
    with mock.patch.object(
            type(client.transport.create_document_link),
            '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document_link_service.DocumentLink(
                name='name_value',
                description='description_value',
                state=document_link_service.DocumentLink.State.ACTIVE,
            ))
        await client.create_document_link(request=None)

        stub.assert_called()
        _, args, _ = stub.mock_calls[0]
        # An empty call produces the default request message.
        assert args[0] == document_link_service.CreateDocumentLinkRequest()
+
+
# Coverage failsafe: a call with request == None and no flattened fields
# must still go out as the default request message (async variant).
@pytest.mark.asyncio
async def test_delete_document_link_empty_call_grpc_asyncio():
    client = DocumentLinkServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Fake the underlying stub call and capture its arguments.
    with mock.patch.object(
            type(client.transport.delete_document_link),
            '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_document_link(request=None)

        stub.assert_called()
        _, args, _ = stub.mock_calls[0]
        # An empty call produces the default request message.
        assert args[0] == document_link_service.DeleteDocumentLinkRequest()
+
+
def test_transport_kind_rest():
    """The "rest" transport class reports kind == "rest"."""
    transport_cls = DocumentLinkServiceClient.get_transport_class("rest")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "rest"
+
+
def test_list_linked_targets_rest_bad_request(request_type=document_link_service.ListLinkedTargetsRequest):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest.

    Fix: removed the dead local ``json_return_value = ''`` — it was never
    read (the mocked response uses ``.json`` / ``.status_code`` only).
    """
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.list_linked_targets(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_link_service.ListLinkedTargetsRequest,
  dict,
])
def test_list_linked_targets_rest_call_success(request_type):
    """A successful REST call deserializes into the expected response."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request that satisfies transcoding.
    request = request_type(
        **{'parent': 'projects/sample1/locations/sample2/documents/sample3'})

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Serialize the expected response and fake an HTTP 200 carrying it.
        expected = document_link_service.ListLinkedTargetsResponse(
              next_page_token='next_page_token_value',
        )
        payload = json_format.MessageToJson(
            document_link_service.ListLinkedTargetsResponse.pb(expected))
        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = payload.encode('UTF-8')
        req.return_value = fake_response
        response = client.list_linked_targets(request)

    # This RPC is unpaged: the response is its own raw page.
    assert response.raw_page is response

    # The payload round-trips into the right type with the right fields.
    assert isinstance(response, document_link_service.ListLinkedTargetsResponse)
    assert response.next_page_token == 'next_page_token_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_linked_targets_rest_interceptors(null_interceptor):
    """Pre/post REST interceptor hooks fire exactly once per call.

    Runs with and without an interceptor instance on the transport; the
    hook mocks are patched on the interceptor *class*, so they are
    exercised in both configurations.
    """
    transport = transports.DocumentLinkServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentLinkServiceRestInterceptor(),
        )
    client = DocumentLinkServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentLinkServiceRestInterceptor, "post_list_linked_targets") as post, \
        mock.patch.object(transports.DocumentLinkServiceRestInterceptor, "pre_list_linked_targets") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = document_link_service.ListLinkedTargetsRequest.pb(document_link_service.ListLinkedTargetsRequest())
        # Bypass real transcoding; the URI/body values are irrelevant here.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP response carrying an empty (but valid)
        # serialized ListLinkedTargetsResponse.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = document_link_service.ListLinkedTargetsResponse.to_json(document_link_service.ListLinkedTargetsResponse())
        req.return_value.content = return_value

        request = document_link_service.ListLinkedTargetsRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre-hook may rewrite request/metadata; echo them unchanged.
        pre.return_value = request, metadata
        post.return_value = document_link_service.ListLinkedTargetsResponse()

        client.list_linked_targets(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_list_linked_sources_rest_bad_request(request_type=document_link_service.ListLinkedSourcesRequest):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest.

    Fix: removed the dead local ``json_return_value = ''`` — it was never
    read (the mocked response uses ``.json`` / ``.status_code`` only).
    """
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.list_linked_sources(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_link_service.ListLinkedSourcesRequest,
  dict,
])
def test_list_linked_sources_rest_call_success(request_type):
    """Exercise a successful list_linked_sources REST round trip."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A parent that satisfies the transcoding rules for this RPC.
    request = request_type(**{'parent': 'projects/sample1/locations/sample2/documents/sample3'})

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Build the wire payload the mocked session will hand back.
        expected = document_link_service.ListLinkedSourcesResponse(
            next_page_token='next_page_token_value',
        )
        payload = json_format.MessageToJson(
            document_link_service.ListLinkedSourcesResponse.pb(expected)
        )
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = payload.encode('UTF-8')
        req.return_value = http_response

        response = client.list_linked_sources(request)

    # The client should wrap the decoded response in a pager.
    assert isinstance(response, pagers.ListLinkedSourcesPager)
    assert response.next_page_token == 'next_page_token_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_linked_sources_rest_interceptors(null_interceptor):
    """Check that the pre/post interceptor hooks fire around list_linked_sources."""
    transport = transports.DocumentLinkServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentLinkServiceRestInterceptor(),
        )
    client = DocumentLinkServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.DocumentLinkServiceRestInterceptor, "post_list_linked_sources") as post, \
        mock.patch.object(transports.DocumentLinkServiceRestInterceptor, "pre_list_linked_sources") as pre:
        pre.assert_not_called()
        post.assert_not_called()

        # Short-circuit transcoding with a canned result.
        pb_request = document_link_service.ListLinkedSourcesRequest.pb(
            document_link_service.ListLinkedSourcesRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_request,
            "query_params": pb_request,
        }

        # Fake a successful HTTP exchange.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        req.return_value.content = document_link_service.ListLinkedSourcesResponse.to_json(
            document_link_service.ListLinkedSourcesResponse())

        request = document_link_service.ListLinkedSourcesRequest()
        pre.return_value = request, [("key", "val"), ("cephalopod", "squid")]
        post.return_value = document_link_service.ListLinkedSourcesResponse()

        client.list_linked_sources(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_create_document_link_rest_bad_request(request_type=document_link_service.CreateDocumentLinkRequest):
    """Verify an HTTP 400 from create_document_link surfaces as core_exceptions.BadRequest."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed unused local `json_return_value`; the error path never reads a body.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.create_document_link(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_link_service.CreateDocumentLinkRequest,
  dict,
])
def test_create_document_link_rest_call_success(request_type):
    """Exercise a successful create_document_link REST round trip."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A parent that satisfies the transcoding rules for this RPC.
    request = request_type(**{'parent': 'projects/sample1/locations/sample2/documents/sample3'})

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Build the wire payload the mocked session will hand back.
        expected = document_link_service.DocumentLink(
            name='name_value',
            description='description_value',
            state=document_link_service.DocumentLink.State.ACTIVE,
        )
        payload = json_format.MessageToJson(
            document_link_service.DocumentLink.pb(expected)
        )
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = payload.encode('UTF-8')
        req.return_value = http_response

        response = client.create_document_link(request)

    # The decoded message should round-trip every populated field.
    assert isinstance(response, document_link_service.DocumentLink)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.state == document_link_service.DocumentLink.State.ACTIVE
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_document_link_rest_interceptors(null_interceptor):
    """Check that the pre/post interceptor hooks fire around create_document_link."""
    transport = transports.DocumentLinkServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentLinkServiceRestInterceptor(),
        )
    client = DocumentLinkServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.DocumentLinkServiceRestInterceptor, "post_create_document_link") as post, \
        mock.patch.object(transports.DocumentLinkServiceRestInterceptor, "pre_create_document_link") as pre:
        pre.assert_not_called()
        post.assert_not_called()

        # Short-circuit transcoding with a canned result.
        pb_request = document_link_service.CreateDocumentLinkRequest.pb(
            document_link_service.CreateDocumentLinkRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_request,
            "query_params": pb_request,
        }

        # Fake a successful HTTP exchange.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        req.return_value.content = document_link_service.DocumentLink.to_json(
            document_link_service.DocumentLink())

        request = document_link_service.CreateDocumentLinkRequest()
        pre.return_value = request, [("key", "val"), ("cephalopod", "squid")]
        post.return_value = document_link_service.DocumentLink()

        client.create_document_link(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_delete_document_link_rest_bad_request(request_type=document_link_service.DeleteDocumentLinkRequest):
    """Verify an HTTP 400 from delete_document_link surfaces as core_exceptions.BadRequest."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/documents/sample3/documentLinks/sample4'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed unused local `json_return_value`; the error path never reads a body.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.delete_document_link(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_link_service.DeleteDocumentLinkRequest,
  dict,
])
def test_delete_document_link_rest_call_success(request_type):
    """Exercise a successful delete_document_link REST round trip (empty body)."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A name that satisfies the transcoding rules for this RPC.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/documents/sample3/documentLinks/sample4'})

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Fake an empty 200 response.
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = ''.encode('UTF-8')
        req.return_value = http_response

        response = client.delete_document_link(request)

    # Deleting a document link yields no payload.
    assert response is None
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_document_link_rest_interceptors(null_interceptor):
    """Check that the pre-RPC interceptor hook fires around delete_document_link."""
    transport = transports.DocumentLinkServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentLinkServiceRestInterceptor(),
        )
    client = DocumentLinkServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.DocumentLinkServiceRestInterceptor, "pre_delete_document_link") as pre:
        pre.assert_not_called()

        # Short-circuit transcoding with a canned result.
        pb_request = document_link_service.DeleteDocumentLinkRequest.pb(
            document_link_service.DeleteDocumentLinkRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_request,
            "query_params": pb_request,
        }

        # Fake a successful HTTP exchange; the delete RPC has no response body.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200

        request = document_link_service.DeleteDocumentLinkRequest()
        pre.return_value = request, [("key", "val"), ("cephalopod", "squid")]

        client.delete_document_link(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
+
+
def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
    """Verify an HTTP 400 from get_operation surfaces as core_exceptions.BadRequest."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed unused local `json_return_value`; the error path never reads a body.)
        response_value = Response()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_operation(request)
+
+
@pytest.mark.parametrize("request_type", [
    operations_pb2.GetOperationRequest,
    dict,
])
def test_get_operation_rest(request_type):
    """get_operation over REST decodes the body into an Operation proto."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request = request_type(**{'name': 'projects/sample1/locations/sample2/operations/sample3'})

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # An empty Operation serialized the way the server would send it.
        payload = json_format.MessageToJson(operations_pb2.Operation())
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = payload.encode('UTF-8')
        req.return_value = http_response

        response = client.get_operation(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.Operation)
+
def test_initialize_client_w_rest():
    """A client can be constructed over REST with anonymous credentials."""
    assert DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    ) is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_list_linked_targets_empty_call_rest():
    """Coverage failsafe: request=None with no flattened fields still works."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the wrapped call and issue a totally empty request.
    with mock.patch.object(
            type(client.transport.list_linked_targets),
            '__call__') as call:
        client.list_linked_targets(request=None)

        # The stub must receive a default-constructed request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_link_service.ListLinkedTargetsRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_list_linked_sources_empty_call_rest():
    """Coverage failsafe: request=None with no flattened fields still works."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the wrapped call and issue a totally empty request.
    with mock.patch.object(
            type(client.transport.list_linked_sources),
            '__call__') as call:
        client.list_linked_sources(request=None)

        # The stub must receive a default-constructed request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_link_service.ListLinkedSourcesRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_create_document_link_empty_call_rest():
    """Coverage failsafe: request=None with no flattened fields still works."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the wrapped call and issue a totally empty request.
    with mock.patch.object(
            type(client.transport.create_document_link),
            '__call__') as call:
        client.create_document_link(request=None)

        # The stub must receive a default-constructed request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_link_service.CreateDocumentLinkRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_delete_document_link_empty_call_rest():
    """Coverage failsafe: request=None with no flattened fields still works."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the wrapped call and issue a totally empty request.
    with mock.patch.object(
            type(client.transport.delete_document_link),
            '__call__') as call:
        client.delete_document_link(request=None)

        # The stub must receive a default-constructed request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_link_service.DeleteDocumentLinkRequest()
+
+
def test_transport_grpc_default():
    """With no transport specified, the client should default to gRPC."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(client.transport, transports.DocumentLinkServiceGrpcTransport)
+
def test_document_link_service_base_transport_error():
    """Passing both a credentials object and credentials_file should raise.

    The unused ``transport =`` binding was dropped: the constructor raises
    before returning, so the name was never assigned anyway.
    """
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.DocumentLinkServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )
+
+
def test_document_link_service_base_transport():
    """Every abstract method/property on the base transport raises NotImplementedError."""
    # Instantiate the base transport with its __init__ stubbed out.
    with mock.patch('google.cloud.contentwarehouse_v1.services.document_link_service.transports.DocumentLinkServiceTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.DocumentLinkServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every RPC method should just blindly raise NotImplementedError.
    for rpc_name in (
        'list_linked_targets',
        'list_linked_sources',
        'create_document_link',
        'delete_document_link',
        'get_operation',
    ):
        with pytest.raises(NotImplementedError):
            getattr(transport, rpc_name)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch-all for the remaining abstract members.
    for member in ('kind',):
        with pytest.raises(NotImplementedError):
            getattr(transport, member)()
+
+
def test_document_link_service_base_transport_with_credentials_file():
    """The base transport loads credentials from a file when one is given.

    Fixes: dropped the unused local ``transport`` binding and normalized the
    mangled indentation of the ``default_scopes`` keyword argument.
    """
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.contentwarehouse_v1.services.document_link_service.transports.DocumentLinkServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.DocumentLinkServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
def test_document_link_service_base_transport_with_adc():
    """ADC is consulted when neither credentials nor credentials_file is given.

    Fixes: dropped the unused local ``transport`` binding.
    """
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.contentwarehouse_v1.services.document_link_service.transports.DocumentLinkServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.DocumentLinkServiceTransport()
        adc.assert_called_once()
+
+
def test_document_link_service_auth_adc():
    """Constructing a client with no credentials falls back to ADC."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        DocumentLinkServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id=None,
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentLinkServiceGrpcTransport,
        transports.DocumentLinkServiceGrpcAsyncIOTransport,
    ],
)
def test_document_link_service_transport_auth_adc(transport_class):
    """gRPC transports fall back to ADC when no credentials are supplied."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentLinkServiceGrpcTransport,
        transports.DocumentLinkServiceGrpcAsyncIOTransport,
        transports.DocumentLinkServiceRestTransport,
    ],
)
def test_document_link_service_transport_auth_gdch_credentials(transport_class):
    """GDC-H credentials get re-scoped to the transport's API audience."""
    host = 'https://language.com'
    # (requested audience, expected audience); None should default to the host.
    for requested, expected in [(None, host),
                                ('https://language2.com', 'https://language2.com')]:
        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
            gdch_mock = mock.MagicMock()
            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
            adc.return_value = (gdch_mock, None)
            transport_class(host=host, api_audience=requested)
            gdch_mock.with_gdch_audience.assert_called_once_with(expected)
+
+
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.DocumentLinkServiceGrpcTransport, grpc_helpers),
        (transports.DocumentLinkServiceGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_document_link_service_transport_create_channel(transport_class, grpc_helpers):
    """Verify the gRPC transport forwards the expected arguments to create_channel.

    Both the sync and asyncio transports are exercised; each must call its
    helper module's ``create_channel`` with the default host/scopes and the
    caller-supplied quota project and scopes.
    """
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        # The exact kwargs here mirror what the generated transport passes;
        # any drift in the transport implementation should fail this check.
        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=["1", "2"],
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("transport_class", [transports.DocumentLinkServiceGrpcTransport, transports.DocumentLinkServiceGrpcAsyncIOTransport])
def test_document_link_service_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """Verify mTLS channel credential selection on the gRPC transports.

    Two cases: an explicitly supplied ``ssl_channel_credentials`` object must
    be passed through to ``create_channel`` untouched, and when absent, a
    ``client_cert_source_for_mtls`` callback must be used to build SSL
    credentials from the cert/key it returns.
    """
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        # The supplied credentials object must appear verbatim in the call.
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            # The cert/key pair produced by the callback must be what feeds
            # grpc.ssl_channel_credentials.
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )
+
def test_document_link_service_http_transport_client_cert_source_for_mtls():
    """The REST transport wires a client-cert source into its mTLS session."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.DocumentLinkServiceRestTransport(
            credentials=creds,
            client_cert_source_for_mtls=client_cert_source_callback,
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_document_link_service_host_no_port(transport_name):
    """An endpoint without a port resolves to :443 (gRPC) or an https URL (REST)."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com'),
        transport=transport_name,
    )
    if transport_name in ('grpc', 'grpc_asyncio'):
        assert client.transport._host == 'contentwarehouse.googleapis.com:443'
    else:
        assert client.transport._host == 'https://contentwarehouse.googleapis.com'
+
@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_document_link_service_host_with_port(transport_name):
    """An explicit port in the endpoint is preserved for every transport."""
    client = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com:8000'),
        transport=transport_name,
    )
    if transport_name in ('grpc', 'grpc_asyncio'):
        assert client.transport._host == 'contentwarehouse.googleapis.com:8000'
    else:
        assert client.transport._host == 'https://contentwarehouse.googleapis.com:8000'
+
@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_document_link_service_client_transport_session_collision(transport_name):
    """Two clients must not share per-method REST sessions."""
    client1 = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    client2 = DocumentLinkServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    # Each wrapped RPC carries its own session object; compare them pairwise.
    for method_name in (
        'list_linked_targets',
        'list_linked_sources',
        'create_document_link',
        'delete_document_link',
    ):
        session1 = getattr(client1.transport, method_name)._session
        session2 = getattr(client2.transport, method_name)._session
        assert session1 != session2
def test_document_link_service_grpc_transport_channel():
    """A user-supplied gRPC channel is adopted verbatim by the transport."""
    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.DocumentLinkServiceGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # `is None` is the idiomatic None check (was `== None`).
    assert transport._ssl_channel_credentials is None
+
+
def test_document_link_service_grpc_asyncio_transport_channel():
    """A user-supplied asyncio gRPC channel is adopted verbatim by the transport."""
    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.DocumentLinkServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # `is None` is the idiomatic None check (was `== None`).
    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.DocumentLinkServiceGrpcTransport, transports.DocumentLinkServiceGrpcAsyncIOTransport])
def test_document_link_service_transport_channel_mtls_with_client_cert_source(
    transport_class
):
    """Verify the deprecated ``api_mtls_endpoint``/``client_cert_source`` path.

    The transport must warn, build SSL credentials from the cert source's
    cert/key, and create its channel against the mTLS endpoint (port 443).
    This test is slated for removal with those deprecated arguments.
    """
    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = ga_credentials.AnonymousCredentials()
            # Deprecated kwargs must still work, but emit a DeprecationWarning.
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, 'default') as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()

            # The cert/key here match what client_cert_source_callback returns
            # elsewhere in this module — presumably b"cert bytes"/b"key bytes";
            # keep in sync with that fixture.
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DocumentLinkServiceGrpcTransport, transports.DocumentLinkServiceGrpcAsyncIOTransport])
+def test_document_link_service_transport_channel_mtls_with_adc(
+    transport_class
+):
+    """With the deprecated mTLS args and no client cert source, the transport
+    warns and falls back to ADC-derived SslCredentials for the channel.
+    """
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_document_path():
+    project = "squid"
+    location = "clam"
+    document = "whelk"
+    expected = "projects/{project}/locations/{location}/documents/{document}".format(project=project, location=location, document=document, )
+    actual = DocumentLinkServiceClient.document_path(project, location, document)
+    assert expected == actual
+
+
+def test_parse_document_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "document": "nudibranch",
+    }
+    path = DocumentLinkServiceClient.document_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentLinkServiceClient.parse_document_path(path)
+    assert expected == actual
+
+def test_document_link_path():
+    project = "cuttlefish"
+    location = "mussel"
+    document = "winkle"
+    document_link = "nautilus"
+    expected = "projects/{project}/locations/{location}/documents/{document}/documentLinks/{document_link}".format(project=project, location=location, document=document, document_link=document_link, )
+    actual = DocumentLinkServiceClient.document_link_path(project, location, document, document_link)
+    assert expected == actual
+
+
+def test_parse_document_link_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+        "document": "squid",
+        "document_link": "clam",
+    }
+    path = DocumentLinkServiceClient.document_link_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentLinkServiceClient.parse_document_link_path(path)
+    assert expected == actual
+
+def test_common_billing_account_path():
+    billing_account = "whelk"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = DocumentLinkServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "octopus",
+    }
+    path = DocumentLinkServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentLinkServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    folder = "oyster"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = DocumentLinkServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "nudibranch",
+    }
+    path = DocumentLinkServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentLinkServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "cuttlefish"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = DocumentLinkServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "mussel",
+    }
+    path = DocumentLinkServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentLinkServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "winkle"
+    expected = "projects/{project}".format(project=project, )
+    actual = DocumentLinkServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "nautilus",
+    }
+    path = DocumentLinkServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentLinkServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "scallop"
+    location = "abalone"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = DocumentLinkServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "squid",
+        "location": "clam",
+    }
+    path = DocumentLinkServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentLinkServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    """The supplied client_info reaches _prep_wrapped_messages both when
+    constructing a client and when constructing a bare transport."""
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.DocumentLinkServiceTransport, '_prep_wrapped_messages') as prep:
+        client = DocumentLinkServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.DocumentLinkServiceTransport, '_prep_wrapped_messages') as prep:
+        transport_class = DocumentLinkServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_get_operation(transport: str = "grpc"):
+    """get_operation forwards the request to the gRPC stub and returns an Operation."""
+    client = DocumentLinkServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+@pytest.mark.asyncio
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
+    """Async variant: get_operation awaits the stub call and returns an Operation."""
+    client = DocumentLinkServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+def test_get_operation_field_headers():
+    """The request's name is propagated as an x-goog-request-params header."""
+    client = DocumentLinkServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_operation_field_headers_async():
+    """Async variant: the request's name is sent as an x-goog-request-params header."""
+    client = DocumentLinkServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_get_operation_from_dict():
+    """get_operation accepts a plain dict in place of a request message."""
+    client = DocumentLinkServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+
+        response = client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_operation_from_dict_async():
+    """Async variant: get_operation accepts a plain dict request."""
+    client = DocumentLinkServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = DocumentLinkServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = DocumentLinkServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    client = DocumentLinkServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = DocumentLinkServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+@pytest.mark.parametrize("client_class,transport_class", [
+    (DocumentLinkServiceClient, transports.DocumentLinkServiceGrpcTransport),
+    (DocumentLinkServiceAsyncClient, transports.DocumentLinkServiceGrpcAsyncIOTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+    """An api_key in client options is exchanged for API-key credentials,
+    which are then handed to the transport constructor."""
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py
new file mode 100644
index 000000000000..9226aba9db80
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py
@@ -0,0 +1,5333 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.contentwarehouse_v1.services.document_schema_service import DocumentSchemaServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.document_schema_service import DocumentSchemaServiceClient
+from google.cloud.contentwarehouse_v1.services.document_schema_service import pagers
+from google.cloud.contentwarehouse_v1.services.document_schema_service import transports
+from google.cloud.contentwarehouse_v1.types import document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema as gcc_document_schema
+from google.cloud.contentwarehouse_v1.types import document_schema_service
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+async def mock_async_gen(data, chunk_size=1):
+    for i in range(0, len(data)):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert DocumentSchemaServiceClient._get_default_mtls_endpoint(None) is None
+    assert DocumentSchemaServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert DocumentSchemaServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert DocumentSchemaServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert DocumentSchemaServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert DocumentSchemaServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+def test__read_environment_variables():
+    """_read_environment_variables reflects the client-certificate, mTLS
+    endpoint, and universe-domain env vars, and rejects unsupported values."""
+    assert DocumentSchemaServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        assert DocumentSchemaServiceClient._read_environment_variables() == (True, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        assert DocumentSchemaServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            DocumentSchemaServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        assert DocumentSchemaServiceClient._read_environment_variables() == (False, "never", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        assert DocumentSchemaServiceClient._read_environment_variables() == (False, "always", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+        assert DocumentSchemaServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            DocumentSchemaServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+        assert DocumentSchemaServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
+def test__get_client_cert_source():
+    """_get_client_cert_source returns the provided source only when cert use
+    is enabled, otherwise falls back to the default source when one exists."""
+    mock_provided_cert_source = mock.Mock()
+    mock_default_cert_source = mock.Mock()
+
+    assert DocumentSchemaServiceClient._get_client_cert_source(None, False) is None
+    assert DocumentSchemaServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
+    assert DocumentSchemaServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
+
+    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
+            assert DocumentSchemaServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
+            assert DocumentSchemaServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
+
+@mock.patch.object(DocumentSchemaServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentSchemaServiceClient))
+@mock.patch.object(DocumentSchemaServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentSchemaServiceAsyncClient))
+def test__get_api_endpoint():
+    """_get_api_endpoint honors explicit overrides, the mTLS policy, and the
+    universe domain, and rejects mTLS outside googleapis.com."""
+    api_override = "foo.com"
+    mock_client_cert_source = mock.Mock()
+    default_universe = DocumentSchemaServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = DocumentSchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = DocumentSchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    assert DocumentSchemaServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
+    assert DocumentSchemaServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DocumentSchemaServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DocumentSchemaServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
+    assert DocumentSchemaServiceClient._get_api_endpoint(None, None, default_universe, "always") == DocumentSchemaServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DocumentSchemaServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DocumentSchemaServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert DocumentSchemaServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
+    assert DocumentSchemaServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
+
+    with pytest.raises(MutualTLSChannelError) as excinfo:
+        DocumentSchemaServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
+    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
+def test__get_universe_domain():
+    """_get_universe_domain prefers the client setting, then the env var,
+    then the default universe; the empty string is rejected."""
+    client_universe_domain = "foo.com"
+    universe_domain_env = "bar.com"
+
+    assert DocumentSchemaServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
+    assert DocumentSchemaServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
+    assert DocumentSchemaServiceClient._get_universe_domain(None, None) == DocumentSchemaServiceClient._DEFAULT_UNIVERSE
+
+    with pytest.raises(ValueError) as excinfo:
+        DocumentSchemaServiceClient._get_universe_domain("", None)
+    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceGrpcTransport, "grpc"),
+    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceRestTransport, "rest"),
+])
+def test__validate_universe_domain(client_class, transport_class, transport_name):
+    client = client_class(
+        transport=transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+    )
+    assert client._validate_universe_domain() == True
+
+    # Test the case when universe is already validated.
+    assert client._validate_universe_domain() == True
+
+    if transport_name == "grpc":
+        # Test the case where credentials are provided by the
+        # `local_channel_credentials`. The default universes in both match.
+        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+        client = client_class(transport=transport_class(channel=channel))
+        assert client._validate_universe_domain() == True
+
+        # Test the case where credentials do not exist: e.g. a transport is provided
+        # with no credentials. Validation should still succeed because there is no
+        # mismatch with non-existent credentials.
+        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+        transport=transport_class(channel=channel)
+        transport._credentials = None
+        client = client_class(transport=transport)
+        assert client._validate_universe_domain() == True
+
+    # TODO: This is needed to cater for older versions of google-auth
+    # Make this test unconditional once the minimum supported version of
+    # google-auth becomes 2.23.0 or higher.
+    google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]]
+    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
+        credentials = ga_credentials.AnonymousCredentials()
+        credentials._universe_domain = "foo.com"
+        # Test the case when there is a universe mismatch from the credentials.
+        client = client_class(
+            transport=transport_class(credentials=credentials)
+        )
+        with pytest.raises(ValueError) as excinfo:
+            client._validate_universe_domain()
+        assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+
+        # Test the case when there is a universe mismatch from the client.
+        #
+        # TODO: Make this test unconditional once the minimum supported version of
+        # google-api-core becomes 2.15.0 or higher.
+        api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]]
+        if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
+            client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),))
+            with pytest.raises(ValueError) as excinfo:
+                client._validate_universe_domain()
+            assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+
+    # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
+    with pytest.raises(ValueError):
+        client._compare_universes("foo.bar", None)
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (DocumentSchemaServiceClient, "grpc"),
+    (DocumentSchemaServiceAsyncClient, "grpc_asyncio"),
+    (DocumentSchemaServiceClient, "rest"),
+])
+def test_document_schema_service_client_from_service_account_info(client_class, transport_name):
+    """from_service_account_info builds a client with the factory-produced
+    credentials and the transport-appropriate default host."""
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'contentwarehouse.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://contentwarehouse.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.DocumentSchemaServiceGrpcTransport, "grpc"),
+    (transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (transports.DocumentSchemaServiceRestTransport, "rest"),
+])
+def test_document_schema_service_client_service_account_always_use_jwt(transport_class, transport_name):
+    """always_use_jwt_access=True triggers with_always_use_jwt_access(True);
+    False leaves the service-account credentials untouched."""
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
@pytest.mark.parametrize("client_class,transport_name", [
    (DocumentSchemaServiceClient, "grpc"),
    (DocumentSchemaServiceAsyncClient, "grpc_asyncio"),
    (DocumentSchemaServiceClient, "rest"),
])
def test_document_schema_service_client_from_service_account_file(client_class, transport_name):
    """Both from_service_account_file and its _json alias must route through the mocked factory."""
    fake_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = fake_creds

        # The two public constructors share one underlying factory.
        for constructor in (client_class.from_service_account_file,
                            client_class.from_service_account_json):
            client = constructor("dummy/file/path.json", transport=transport_name)
            assert isinstance(client, client_class)
            assert client.transport._credentials == fake_creds

        # gRPC transports address host:port; REST uses a full URL.
        expected_host = (
            'contentwarehouse.googleapis.com:443'
            if transport_name in ('grpc', 'grpc_asyncio')
            else 'https://contentwarehouse.googleapis.com'
        )
        assert client.transport._host == expected_host
+
+
def test_document_schema_service_client_get_transport_class():
    """The implicit default transport is a sync transport; a named lookup is exact."""
    # With no argument the client may choose either sync transport (gRPC or REST).
    default_transport = DocumentSchemaServiceClient.get_transport_class()
    assert default_transport in (
        transports.DocumentSchemaServiceGrpcTransport,
        transports.DocumentSchemaServiceRestTransport,
    )

    # Requesting "grpc" by name must resolve to exactly the gRPC transport class.
    named_transport = DocumentSchemaServiceClient.get_transport_class("grpc")
    assert named_transport == transports.DocumentSchemaServiceGrpcTransport
+
+
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceGrpcTransport, "grpc"),
    (DocumentSchemaServiceAsyncClient, transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceRestTransport, "rest"),
])
@mock.patch.object(DocumentSchemaServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentSchemaServiceClient))
@mock.patch.object(DocumentSchemaServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentSchemaServiceAsyncClient))
def test_document_schema_service_client_client_options(client_class, transport_class, transport_name):
    """Verify ClientOptions handling for every transport.

    Covers: reusing a caller-supplied transport instance, building a transport
    from its name, an explicit ``api_endpoint`` override, the
    ``GOOGLE_API_USE_MTLS_ENDPOINT`` values ("never", "always", unsupported),
    an unsupported ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` value, and the
    ``quota_project_id`` / ``api_audience`` options — each time asserting the
    exact keyword arguments forwarded to the transport constructor.
    """
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(DocumentSchemaServiceClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(DocumentSchemaServiceClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client = client_class(transport=transport_name)
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client = client_class(transport=transport_name)
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_endpoint is provided
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceGrpcTransport, "grpc", "true"),
    (DocumentSchemaServiceAsyncClient, transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceGrpcTransport, "grpc", "false"),
    (DocumentSchemaServiceAsyncClient, transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceRestTransport, "rest", "true"),
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceRestTransport, "rest", "false"),
])
@mock.patch.object(DocumentSchemaServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentSchemaServiceClient))
@mock.patch.object(DocumentSchemaServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentSchemaServiceAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_document_schema_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, the endpoint switches to the mTLS
    endpoint only when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a client
    certificate exists — whether supplied via ClientOptions or via ADC.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    if use_client_cert_env == "false":
                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )
+
+
@pytest.mark.parametrize("client_class", [
    DocumentSchemaServiceClient, DocumentSchemaServiceAsyncClient
])
@mock.patch.object(DocumentSchemaServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DocumentSchemaServiceClient))
@mock.patch.object(DocumentSchemaServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DocumentSchemaServiceAsyncClient))
def test_document_schema_service_client_get_mtls_endpoint_and_cert_source(client_class):
    """Exercise get_mtls_endpoint_and_cert_source across the supported
    GOOGLE_API_USE_CLIENT_CERTIFICATE / GOOGLE_API_USE_MTLS_ENDPOINT
    combinations, including the error paths for unsupported values.
    """
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        # The provided cert source is ignored when client certs are disabled.
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
@pytest.mark.parametrize("client_class", [
    DocumentSchemaServiceClient, DocumentSchemaServiceAsyncClient
])
@mock.patch.object(DocumentSchemaServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentSchemaServiceClient))
@mock.patch.object(DocumentSchemaServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentSchemaServiceAsyncClient))
def test_document_schema_service_client_client_api_endpoint(client_class):
    """Verify the resolved client.api_endpoint under endpoint overrides, mTLS
    environment settings, and universe-domain options — including older
    ClientOptions versions that lack the ``universe_domain`` attribute.
    """
    mock_client_cert_source = client_cert_source_callback
    api_override = "foo.com"
    default_universe = DocumentSchemaServiceClient._DEFAULT_UNIVERSE
    default_endpoint = DocumentSchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = DocumentSchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
    # use ClientOptions.api_endpoint as the api endpoint regardless.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
            assert client.api_endpoint == api_override

    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint

    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT

    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
    # and ClientOptions.universe_domain="bar.com",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
    options = client_options.ClientOptions()
    # Probe for the attribute: older google-api-core ClientOptions lack universe_domain.
    universe_exists = hasattr(options, "universe_domain")
    if universe_exists:
        options = client_options.ClientOptions(universe_domain=mock_universe)
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    else:
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
    assert client.universe_domain == (mock_universe if universe_exists else default_universe)

    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
    options = client_options.ClientOptions()
    if hasattr(options, "universe_domain"):
        delattr(options, "universe_domain")
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint
+
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceGrpcTransport, "grpc"),
    (DocumentSchemaServiceAsyncClient, transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceRestTransport, "rest"),
])
def test_document_schema_service_client_client_options_scopes(client_class, transport_class, transport_name):
    """Scopes set via ClientOptions must be forwarded verbatim to the transport."""
    scoped_options = client_options.ClientOptions(scopes=["1", "2"])
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=scoped_options, transport=transport_name)
        expected_kwargs = dict(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
        patched.assert_called_once_with(**expected_kwargs)
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceGrpcTransport, "grpc", grpc_helpers),
    (DocumentSchemaServiceAsyncClient, transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceRestTransport, "rest", None),
])
def test_document_schema_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """A ClientOptions credentials_file must be passed through to the transport
    constructor unchanged.

    Note: the ``grpc_helpers`` parameter is supplied by the parametrization but
    unused in this test (it matters in the create_channel variant below).
    """
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
+
def test_document_schema_service_client_client_options_from_dict():
    """A plain dict for client_options must behave like a ClientOptions object."""
    transport_path = (
        'google.cloud.contentwarehouse_v1.services.document_schema_service.'
        'transports.DocumentSchemaServiceGrpcTransport.__init__'
    )
    with mock.patch(transport_path) as grpc_transport:
        grpc_transport.return_value = None
        client = DocumentSchemaServiceClient(
            client_options={'api_endpoint': 'squid.clam.whelk'}
        )
        expected_kwargs = dict(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
        grpc_transport.assert_called_once_with(**expected_kwargs)
+
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceGrpcTransport, "grpc", grpc_helpers),
    (DocumentSchemaServiceAsyncClient, transports.DocumentSchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
])
def test_document_schema_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """Credentials loaded from a file must reach both the transport constructor
    and the gRPC channel creation call (with the expected scopes and options).
    """
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        # The file-loaded credentials (not ADC) must be the ones on the channel.
        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=None,
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("request_type", [
  document_schema_service.CreateDocumentSchemaRequest,
  dict,
])
def test_create_document_schema(request_type, transport: str = 'grpc'):
    """create_document_schema must send the request to the stub once and
    unmarshal the stubbed DocumentSchema response fields.
    """
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document_schema),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = gcc_document_schema.DocumentSchema(
            name='name_value',
            display_name='display_name_value',
            document_is_folder=True,
            description='description_value',
        )
        response = client.create_document_schema(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        # An empty request (or dict) is coerced to the proto request type.
        request = document_schema_service.CreateDocumentSchemaRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, gcc_document_schema.DocumentSchema)
    assert response.name == 'name_value'
    assert response.display_name == 'display_name_value'
    assert response.document_is_folder is True
    assert response.description == 'description_value'
+
+
def test_create_document_schema_non_empty_request_with_auto_populated_field():
    """Non-UUID4 fields set by the caller must survive request coercion."""
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = document_schema_service.CreateDocumentSchemaRequest(
        parent='parent_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document_schema),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.create_document_schema(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == document_schema_service.CreateDocumentSchemaRequest(
            parent='parent_value',
        )
+
def test_create_document_schema_use_cached_wrapped_rpc():
    """The wrapped RPC for create_document_schema is built once at client
    construction and reused on subsequent calls (no re-wrapping per call).
    """
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentSchemaServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.create_document_schema in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.create_document_schema] = mock_rpc
        request = {}
        client.create_document_schema(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.create_document_schema(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_document_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_document_schema in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_document_schema] = mock_rpc
+
+        request = {}
+        await client.create_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.create_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_document_schema_async(transport: str = 'grpc_asyncio', request_type=document_schema_service.CreateDocumentSchemaRequest):
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gcc_document_schema.DocumentSchema(
+            name='name_value',
+            display_name='display_name_value',
+            document_is_folder=True,
+            description='description_value',
+        ))
+        response = await client.create_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.CreateDocumentSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gcc_document_schema.DocumentSchema)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.document_is_folder is True
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_create_document_schema_async_from_dict():
+    await test_create_document_schema_async(request_type=dict)
+
+def test_create_document_schema_field_headers():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.CreateDocumentSchemaRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_document_schema),
+            '__call__') as call:
+        call.return_value = gcc_document_schema.DocumentSchema()
+        client.create_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_document_schema_field_headers_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.CreateDocumentSchemaRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_document_schema),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document_schema.DocumentSchema())
+        await client.create_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_document_schema_flattened():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcc_document_schema.DocumentSchema()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_document_schema(
+            parent='parent_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].document_schema
+        mock_val = gcc_document_schema.DocumentSchema(name='name_value')
+        assert arg == mock_val
+
+
+def test_create_document_schema_flattened_error():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_document_schema(
+            document_schema_service.CreateDocumentSchemaRequest(),
+            parent='parent_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_create_document_schema_flattened_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcc_document_schema.DocumentSchema()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document_schema.DocumentSchema())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_document_schema(
+            parent='parent_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].document_schema
+        mock_val = gcc_document_schema.DocumentSchema(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_document_schema_flattened_error_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_document_schema(
+            document_schema_service.CreateDocumentSchemaRequest(),
+            parent='parent_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  document_schema_service.UpdateDocumentSchemaRequest,
+  dict,
+])
+def test_update_document_schema(request_type, transport: str = 'grpc'):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcc_document_schema.DocumentSchema(
+            name='name_value',
+            display_name='display_name_value',
+            document_is_folder=True,
+            description='description_value',
+        )
+        response = client.update_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.UpdateDocumentSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gcc_document_schema.DocumentSchema)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.document_is_folder is True
+    assert response.description == 'description_value'
+
+
+def test_update_document_schema_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = document_schema_service.UpdateDocumentSchemaRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document_schema),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.update_document_schema(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == document_schema_service.UpdateDocumentSchemaRequest(
+            name='name_value',
+        )
+
+def test_update_document_schema_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_document_schema in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_document_schema] = mock_rpc
+        request = {}
+        client.update_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.update_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_document_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_document_schema in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_document_schema] = mock_rpc
+
+        request = {}
+        await client.update_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.update_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_document_schema_async(transport: str = 'grpc_asyncio', request_type=document_schema_service.UpdateDocumentSchemaRequest):
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document_schema.DocumentSchema(
+            name='name_value',
+            display_name='display_name_value',
+            document_is_folder=True,
+            description='description_value',
+        ))
+        response = await client.update_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.UpdateDocumentSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gcc_document_schema.DocumentSchema)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.document_is_folder is True
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_update_document_schema_async_from_dict():
+    await test_update_document_schema_async(request_type=dict)
+
+def test_update_document_schema_field_headers():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.UpdateDocumentSchemaRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document_schema),
+            '__call__') as call:
+        call.return_value = gcc_document_schema.DocumentSchema()
+        client.update_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_document_schema_field_headers_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.UpdateDocumentSchemaRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document_schema),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document_schema.DocumentSchema())
+        await client.update_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_document_schema_flattened():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcc_document_schema.DocumentSchema()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_document_schema(
+            name='name_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+        arg = args[0].document_schema
+        mock_val = gcc_document_schema.DocumentSchema(name='name_value')
+        assert arg == mock_val
+
+
+def test_update_document_schema_flattened_error():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_document_schema(
+            document_schema_service.UpdateDocumentSchemaRequest(),
+            name='name_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_update_document_schema_flattened_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # The async surface awaits the stub, so the value is an awaitable fake call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document_schema.DocumentSchema())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_document_schema(
+            name='name_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+        arg = args[0].document_schema
+        mock_val = gcc_document_schema.DocumentSchema(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_document_schema_flattened_error_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_document_schema(
+            document_schema_service.UpdateDocumentSchemaRequest(),
+            name='name_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  document_schema_service.GetDocumentSchemaRequest,
+  dict,
+])
+def test_get_document_schema(request_type, transport: str = 'grpc'):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_schema.DocumentSchema(
+            name='name_value',
+            display_name='display_name_value',
+            document_is_folder=True,
+            description='description_value',
+        )
+        response = client.get_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.GetDocumentSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, document_schema.DocumentSchema)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.document_is_folder is True
+    assert response.description == 'description_value'
+
+
+def test_get_document_schema_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = document_schema_service.GetDocumentSchemaRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_document_schema),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.get_document_schema(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == document_schema_service.GetDocumentSchemaRequest(
+            name='name_value',
+        )
+
+def test_get_document_schema_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_document_schema in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_document_schema] = mock_rpc
+        request = {}
+        client.get_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_document_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_document_schema in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_document_schema] = mock_rpc
+
+        request = {}
+        await client.get_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_document_schema_async(transport: str = 'grpc_asyncio', request_type=document_schema_service.GetDocumentSchemaRequest):
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_schema.DocumentSchema(
+            name='name_value',
+            display_name='display_name_value',
+            document_is_folder=True,
+            description='description_value',
+        ))
+        response = await client.get_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.GetDocumentSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, document_schema.DocumentSchema)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.document_is_folder is True
+    assert response.description == 'description_value'
+
+
+@pytest.mark.asyncio
+async def test_get_document_schema_async_from_dict():
+    await test_get_document_schema_async(request_type=dict)
+
+def test_get_document_schema_field_headers():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.GetDocumentSchemaRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_document_schema),
+            '__call__') as call:
+        call.return_value = document_schema.DocumentSchema()
+        client.get_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_document_schema_field_headers_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.GetDocumentSchemaRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_document_schema),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_schema.DocumentSchema())
+        await client.get_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_document_schema_flattened():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_schema.DocumentSchema()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_document_schema(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_document_schema_flattened_error():
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_document_schema(
+            document_schema_service.GetDocumentSchemaRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_document_schema_flattened_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_schema.DocumentSchema()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_schema.DocumentSchema())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_document_schema(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_document_schema_flattened_error_async():
+    """Async variant: request object plus flattened kwargs must raise ValueError."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_document_schema(
+            document_schema_service.GetDocumentSchemaRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  document_schema_service.DeleteDocumentSchemaRequest,
+  dict,
+])
+def test_delete_document_schema(request_type, transport: str = 'grpc'):
+    """Basic DeleteDocumentSchema round trip over a mocked gRPC stub."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.DeleteDocumentSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_document_schema_non_empty_request_with_auto_populated_field():
+    """Non-empty requests keep explicit fields while UUID4 fields auto-populate."""
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = document_schema_service.DeleteDocumentSchemaRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document_schema),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.delete_document_schema(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == document_schema_service.DeleteDocumentSchemaRequest(
+            name='name_value',
+        )
+
+def test_delete_document_schema_use_cached_wrapped_rpc():
+    """The client must reuse the wrapped RPC cached at construction time."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_document_schema in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_document_schema] = mock_rpc
+        request = {}
+        client.delete_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_document_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    """Async variant: the cached wrapped RPC is reused across calls."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.delete_document_schema in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.delete_document_schema] = mock_rpc
+
+        request = {}
+        await client.delete_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.delete_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_document_schema_async(transport: str = 'grpc_asyncio', request_type=document_schema_service.DeleteDocumentSchemaRequest):
+    """Async DeleteDocumentSchema round trip over a mocked gRPC stub."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.DeleteDocumentSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_document_schema_async_from_dict():
+    """Exercise the async DeleteDocumentSchema test path with a dict-typed request."""
+    await test_delete_document_schema_async(request_type=dict)
+
+def test_delete_document_schema_field_headers():
+    """Verify the request's `name` field is sent as an x-goog-request-params header."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.DeleteDocumentSchemaRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document_schema),
+            '__call__') as call:
+        call.return_value = None
+        client.delete_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_document_schema_field_headers_async():
+    """Async variant: verify the x-goog-request-params routing header is sent."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.DeleteDocumentSchemaRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document_schema),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_document_schema_flattened():
+    """Verify a flattened `name=` kwarg is folded into the request message."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_document_schema(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_document_schema_flattened_error():
+    """Passing both a request object and flattened kwargs must raise ValueError."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_document_schema(
+            document_schema_service.DeleteDocumentSchemaRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_document_schema_flattened_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document_schema),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_document_schema(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_document_schema_flattened_error_async():
+    """Async variant: request object plus flattened kwargs must raise ValueError."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_document_schema(
+            document_schema_service.DeleteDocumentSchemaRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  document_schema_service.ListDocumentSchemasRequest,
+  dict,
+])
+def test_list_document_schemas(request_type, transport: str = 'grpc'):
+    """Basic ListDocumentSchemas round trip over a mocked gRPC stub."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_schema_service.ListDocumentSchemasResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_document_schemas(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.ListDocumentSchemasRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDocumentSchemasPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_document_schemas_non_empty_request_with_auto_populated_field():
+    """Non-empty requests keep explicit fields while UUID4 fields auto-populate."""
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = document_schema_service.ListDocumentSchemasRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.list_document_schemas(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == document_schema_service.ListDocumentSchemasRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+        )
+
+def test_list_document_schemas_use_cached_wrapped_rpc():
+    """The client must reuse the wrapped RPC cached at construction time."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_document_schemas in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_document_schemas] = mock_rpc
+        request = {}
+        client.list_document_schemas(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_document_schemas(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_document_schemas_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    """Async variant: the cached wrapped RPC is reused across calls."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_document_schemas in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_document_schemas] = mock_rpc
+
+        request = {}
+        await client.list_document_schemas(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_document_schemas(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_document_schemas_async(transport: str = 'grpc_asyncio', request_type=document_schema_service.ListDocumentSchemasRequest):
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document_schema_service.ListDocumentSchemasResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_document_schemas(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_schema_service.ListDocumentSchemasRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDocumentSchemasAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_document_schemas_async_from_dict():
+    """Exercise the async ListDocumentSchemas test path with a dict-typed request."""
+    await test_list_document_schemas_async(request_type=dict)
+
+def test_list_document_schemas_field_headers():
+    """Verify the request's `parent` field is sent as an x-goog-request-params header."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.ListDocumentSchemasRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        call.return_value = document_schema_service.ListDocumentSchemasResponse()
+        client.list_document_schemas(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_document_schemas_field_headers_async():
+    """Async variant: verify the x-goog-request-params routing header is sent."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_schema_service.ListDocumentSchemasRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_schema_service.ListDocumentSchemasResponse())
+        await client.list_document_schemas(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_document_schemas_flattened():
+    """Verify a flattened `parent=` kwarg is folded into the request message."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_schema_service.ListDocumentSchemasResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_document_schemas(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_document_schemas_flattened_error():
+    """Passing both a request object and flattened kwargs must raise ValueError."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_document_schemas(
+            document_schema_service.ListDocumentSchemasRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_document_schemas_flattened_async():
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_schema_service.ListDocumentSchemasResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_schema_service.ListDocumentSchemasResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_document_schemas(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_document_schemas_flattened_error_async():
+    """Async variant: request object plus flattened kwargs must raise ValueError."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_document_schemas(
+            document_schema_service.ListDocumentSchemasRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_document_schemas_pager(transport_name: str = "grpc"):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                ],
+                next_page_token='abc',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[],
+                next_page_token='def',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                ],
+                next_page_token='ghi',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_document_schemas(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, document_schema.DocumentSchema)
+                   for i in results)
+def test_list_document_schemas_pages(transport_name: str = "grpc"):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                ],
+                next_page_token='abc',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[],
+                next_page_token='def',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                ],
+                next_page_token='ghi',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_document_schemas(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_document_schemas_async_pager():
+    """Verify the async pager flattens all mocked pages into items.
+
+    Four mocked pages hold six ``DocumentSchema`` items in total; the
+    async pager must yield exactly those six items.
+    """
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                ],
+                next_page_token='abc',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[],
+                next_page_token='def',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                ],
+                next_page_token='ghi',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                ],
+            ),
+            # Raised only if the pager requests a page past the last one.
+            RuntimeError,
+        )
+        async_pager = await client.list_document_schemas(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, document_schema.DocumentSchema)
+                for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_document_schemas_async_pages():
+    """Verify the async pager exposes each mocked page with its token.
+
+    Iterates ``.pages`` of the async pager and checks each page's raw
+    ``next_page_token`` against the mocked sequence.
+    """
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_document_schemas),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                    document_schema.DocumentSchema(),
+                ],
+                next_page_token='abc',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[],
+                next_page_token='def',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                ],
+                next_page_token='ghi',
+            ),
+            document_schema_service.ListDocumentSchemasResponse(
+                document_schemas=[
+                    document_schema.DocumentSchema(),
+                ],
+            ),
+            # Raised only if the pager requests a page past the last one.
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in ( # pragma: no branch
+            await client.list_document_schemas(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_create_document_schema_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_document_schema in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_document_schema] = mock_rpc
+
+        request = {}
+        client.create_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.create_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_create_document_schema_rest_required_fields(request_type=document_schema_service.CreateDocumentSchemaRequest):
+    """Exercise required-field handling for create_document_schema over REST.
+
+    First walks ``_get_unset_required_fields`` over the JSON-ified request
+    to confirm defaults are dropped and non-default values survive, then
+    issues the call against a mocked HTTP session (with ``transcode``
+    mocked out) and checks the query params that reach the session.
+    """
+    transport_class = transports.DocumentSchemaServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = gcc_document_schema.DocumentSchema()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = gcc_document_schema.DocumentSchema.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_document_schema(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_document_schema_rest_unset_required_fields():
+    transport = transports.DocumentSchemaServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.create_document_schema._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "documentSchema", )))
+
+
+def test_create_document_schema_rest_flattened():
+    """Verify flattened kwargs are accepted and mapped onto the HTTP URI.
+
+    Calls the method with flattened ``parent``/``document_schema`` args
+    against a mocked session and validates the request URI against the
+    create template.
+    """
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcc_document_schema.DocumentSchema()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = gcc_document_schema.DocumentSchema.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.create_document_schema(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/documentSchemas" % client.transport._host, args[1])
+
+
+def test_create_document_schema_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_document_schema(
+            document_schema_service.CreateDocumentSchemaRequest(),
+            parent='parent_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+
+def test_update_document_schema_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_document_schema in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_document_schema] = mock_rpc
+
+        request = {}
+        client.update_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.update_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_update_document_schema_rest_required_fields(request_type=document_schema_service.UpdateDocumentSchemaRequest):
+    """Exercise required-field handling for update_document_schema over REST.
+
+    First walks ``_get_unset_required_fields`` over the JSON-ified request
+    to confirm defaults are dropped and non-default values survive, then
+    issues the call against a mocked HTTP session (with ``transcode``
+    mocked out) and checks the query params that reach the session.
+    """
+    transport_class = transports.DocumentSchemaServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_document_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_document_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = gcc_document_schema.DocumentSchema()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = gcc_document_schema.DocumentSchema.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_document_schema(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_document_schema_rest_unset_required_fields():
+    transport = transports.DocumentSchemaServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.update_document_schema._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", "documentSchema", )))
+
+
+def test_update_document_schema_rest_flattened():
+    """Verify flattened kwargs are accepted and mapped onto the HTTP URI.
+
+    Calls the method with flattened ``name``/``document_schema`` args
+    against a mocked session and validates the request URI against the
+    update template.
+    """
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcc_document_schema.DocumentSchema()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = gcc_document_schema.DocumentSchema.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.update_document_schema(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/documentSchemas/*}" % client.transport._host, args[1])
+
+
+def test_update_document_schema_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_document_schema(
+            document_schema_service.UpdateDocumentSchemaRequest(),
+            name='name_value',
+            document_schema=gcc_document_schema.DocumentSchema(name='name_value'),
+        )
+
+
+def test_get_document_schema_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_document_schema in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_document_schema] = mock_rpc
+
+        request = {}
+        client.get_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_document_schema_rest_required_fields(request_type=document_schema_service.GetDocumentSchemaRequest):
+    """Exercise required-field handling for get_document_schema over REST.
+
+    First walks ``_get_unset_required_fields`` over the JSON-ified request
+    to confirm defaults are dropped and non-default values survive, then
+    issues the call against a mocked HTTP session (with ``transcode``
+    mocked out) and checks the query params that reach the session.
+    """
+    transport_class = transports.DocumentSchemaServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_document_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_document_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = document_schema.DocumentSchema()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = document_schema.DocumentSchema.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_document_schema(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_document_schema_rest_unset_required_fields():
+    transport = transports.DocumentSchemaServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.get_document_schema._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_document_schema_rest_flattened():
+    """Verify flattened kwargs are accepted and mapped onto the HTTP URI.
+
+    Calls the method with a flattened ``name`` arg against a mocked
+    session and validates the request URI against the get template.
+    """
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = document_schema.DocumentSchema()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = document_schema.DocumentSchema.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.get_document_schema(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/documentSchemas/*}" % client.transport._host, args[1])
+
+
+def test_get_document_schema_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_document_schema(
+            document_schema_service.GetDocumentSchemaRequest(),
+            name='name_value',
+        )
+
+
+def test_delete_document_schema_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_document_schema in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_document_schema] = mock_rpc
+
+        request = {}
+        client.delete_document_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_document_schema(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_delete_document_schema_rest_required_fields(request_type=document_schema_service.DeleteDocumentSchemaRequest):
+    """Exercise required-field handling for delete_document_schema over REST.
+
+    First walks ``_get_unset_required_fields`` over the JSON-ified request
+    to confirm defaults are dropped and non-default values survive, then
+    issues the call against a mocked HTTP session (with ``transcode``
+    mocked out) and checks the query params that reach the session.
+    Delete returns an empty body, hence the empty JSON payload below.
+    """
+    transport_class = transports.DocumentSchemaServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document_schema._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ''
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_document_schema(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_document_schema_rest_unset_required_fields():
+    transport = transports.DocumentSchemaServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.delete_document_schema._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_delete_document_schema_rest_flattened():
+    """Verify flattened kwargs are accepted and mapped onto the HTTP URI.
+
+    Calls the method with a flattened ``name`` arg against a mocked
+    session (delete returns an empty body) and validates the request URI
+    against the delete template.
+    """
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ''
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.delete_document_schema(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/documentSchemas/*}" % client.transport._host, args[1])
+
+
+def test_delete_document_schema_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_document_schema(
+            document_schema_service.DeleteDocumentSchemaRequest(),
+            name='name_value',
+        )
+
+
def test_list_document_schemas_rest_use_cached_wrapped_rpc():
    """Verify the REST client builds one wrapped RPC at construction time and reuses it.

    Calling list_document_schemas twice must hit the cached wrapper both
    times and never invoke gapic_v1.method.wrap_method again.
    """
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentSchemaServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.list_document_schemas in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.list_document_schemas] = mock_rpc

        request = {}
        client.list_document_schemas(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.list_document_schemas(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
+
def test_list_document_schemas_rest_required_fields(request_type=document_schema_service.ListDocumentSchemasRequest):
    """Exercise required-field handling for list_document_schemas over REST.

    Checks that default-valued required fields are dropped from the JSON
    request, that non-default values survive, and that the request the
    client finally sends carries the expected query parameters.
    """
    transport_class = transports.DocumentSchemaServiceRestTransport

    # Start from an empty required field; proto3 treats "" as the default.
    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_document_schemas._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_document_schemas._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("page_size", "page_token", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = document_schema_service.ListDocumentSchemasResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = document_schema_service.ListDocumentSchemasResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_document_schemas(request)

            # Only the library-added "$alt" parameter should remain; all
            # default-valued request fields must have been stripped.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_list_document_schemas_rest_unset_required_fields():
    """The unset-required-fields set for list_document_schemas excludes path params.

    "pageSize"/"pageToken" are the optional query parameters; intersecting
    with the required set ("parent", a path parameter) must yield nothing
    that would be emitted as a query parameter.
    """
    # Fix: pass a credentials *instance* (the original passed the class
    # object itself), matching every other transport construction in this file.
    transport = transports.DocumentSchemaServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.list_document_schemas._get_unset_required_fields({})
    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
def test_list_document_schemas_rest_flattened():
    """Flattened arguments to list_document_schemas must produce a valid REST URI.

    Mocks the HTTP session, calls the method with flattened fields, and
    validates the request path against the service's URI template.
    """
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = document_schema_service.ListDocumentSchemasResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'parent': 'projects/sample1/locations/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            parent='parent_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = document_schema_service.ListDocumentSchemasResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list_document_schemas(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/documentSchemas" % client.transport._host, args[1])
+
+
def test_list_document_schemas_rest_flattened_error(transport: str = 'rest'):
    """Supplying a request object together with flattened fields must fail."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mixing a request message with flattened keyword arguments is ambiguous,
    # so the client surfaces it as a ValueError.
    request = document_schema_service.ListDocumentSchemasRequest()
    with pytest.raises(ValueError):
        client.list_document_schemas(request, parent='parent_value')
+
+
def test_list_document_schemas_rest_pager(transport: str = 'rest'):
    """Paging over list_document_schemas must walk all pages and yield all items.

    Feeds four fake pages (3 + 0 + 1 + 2 schemas) through the mocked HTTP
    session — twice, since the test iterates results and then pages — and
    checks item count, item type, and per-page next_page_token values.
    """
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            document_schema_service.ListDocumentSchemasResponse(
                document_schemas=[
                    document_schema.DocumentSchema(),
                    document_schema.DocumentSchema(),
                    document_schema.DocumentSchema(),
                ],
                next_page_token='abc',
            ),
            document_schema_service.ListDocumentSchemasResponse(
                document_schemas=[],
                next_page_token='def',
            ),
            document_schema_service.ListDocumentSchemasResponse(
                document_schemas=[
                    document_schema.DocumentSchema(),
                ],
                next_page_token='ghi',
            ),
            document_schema_service.ListDocumentSchemasResponse(
                document_schemas=[
                    document_schema.DocumentSchema(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(document_schema_service.ListDocumentSchemasResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'parent': 'projects/sample1/locations/sample2'}

        pager = client.list_document_schemas(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, document_schema.DocumentSchema)
                for i in results)

        pages = list(client.list_document_schemas(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
+
def test_credentials_transport_error():
    """Mutually exclusive client configuration options must raise ValueError."""
    def fresh_transport():
        # Each failing construction gets its own transport instance.
        return transports.DocumentSchemaServiceGrpcTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # It is an error to provide credentials and a transport instance.
    with pytest.raises(ValueError):
        DocumentSchemaServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=fresh_transport(),
        )

    # It is an error to provide a credentials file and a transport instance.
    with pytest.raises(ValueError):
        DocumentSchemaServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=fresh_transport(),
        )

    # It is an error to provide an api_key and a transport instance.
    key_options = client_options.ClientOptions()
    key_options.api_key = "api_key"
    with pytest.raises(ValueError):
        DocumentSchemaServiceClient(
            client_options=key_options,
            transport=fresh_transport(),
        )

    # It is an error to provide an api_key and a credential.
    key_options = client_options.ClientOptions()
    key_options.api_key = "api_key"
    with pytest.raises(ValueError):
        DocumentSchemaServiceClient(
            client_options=key_options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    with pytest.raises(ValueError):
        DocumentSchemaServiceClient(
            client_options={"scopes": ["1", "2"]},
            transport=fresh_transport(),
        )
+
+
def test_transport_instance():
    """A client built around a pre-made transport keeps that exact instance."""
    grpc_transport = transports.DocumentSchemaServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert DocumentSchemaServiceClient(transport=grpc_transport).transport is grpc_transport
+
def test_transport_get_channel():
    """Both gRPC transport flavors expose a usable grpc_channel."""
    for transport_cls in (
        transports.DocumentSchemaServiceGrpcTransport,
        transports.DocumentSchemaServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
+
@pytest.mark.parametrize("transport_class", [
    transports.DocumentSchemaServiceGrpcTransport,
    transports.DocumentSchemaServiceGrpcAsyncIOTransport,
    transports.DocumentSchemaServiceRestTransport,
])
def test_transport_adc(transport_class):
    """Transports fall back to application default credentials when none are given."""
    with mock.patch.object(google.auth, 'default') as mocked_default:
        mocked_default.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
    mocked_default.assert_called_once()
+
def test_transport_kind_grpc():
    """get_transport_class('grpc') yields a transport whose kind is 'grpc'."""
    transport_cls = DocumentSchemaServiceClient.get_transport_class("grpc")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "grpc"
+
+
def test_initialize_client_w_grpc():
    """A client can be constructed over the gRPC transport."""
    assert DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    ) is not None
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
def test_create_document_schema_empty_call_grpc():
    """create_document_schema(request=None) sends a default CreateDocumentSchemaRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    stub_type = type(client.transport.create_document_schema)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = gcc_document_schema.DocumentSchema()
        client.create_document_schema(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.CreateDocumentSchemaRequest()
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
def test_update_document_schema_empty_call_grpc():
    """update_document_schema(request=None) sends a default UpdateDocumentSchemaRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    stub_type = type(client.transport.update_document_schema)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = gcc_document_schema.DocumentSchema()
        client.update_document_schema(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.UpdateDocumentSchemaRequest()
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
def test_get_document_schema_empty_call_grpc():
    """get_document_schema(request=None) sends a default GetDocumentSchemaRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    stub_type = type(client.transport.get_document_schema)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = document_schema.DocumentSchema()
        client.get_document_schema(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.GetDocumentSchemaRequest()
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
def test_delete_document_schema_empty_call_grpc():
    """delete_document_schema(request=None) sends a default DeleteDocumentSchemaRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    stub_type = type(client.transport.delete_document_schema)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = None
        client.delete_document_schema(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.DeleteDocumentSchemaRequest()
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
def test_list_document_schemas_empty_call_grpc():
    """list_document_schemas(request=None) sends a default ListDocumentSchemasRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    stub_type = type(client.transport.list_document_schemas)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = document_schema_service.ListDocumentSchemasResponse()
        client.list_document_schemas(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.ListDocumentSchemasRequest()
+
+
def test_transport_kind_grpc_asyncio():
    """get_transport_class('grpc_asyncio') yields a transport reporting that kind."""
    transport_cls = DocumentSchemaServiceAsyncClient.get_transport_class("grpc_asyncio")
    transport = transport_cls(credentials=async_anonymous_credentials())
    assert transport.kind == "grpc_asyncio"
+
+
def test_initialize_client_w_grpc_asyncio():
    """An async client can be constructed over the grpc_asyncio transport."""
    assert DocumentSchemaServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    ) is not None
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
@pytest.mark.asyncio
async def test_create_document_schema_empty_call_grpc_asyncio():
    """Async create_document_schema(request=None) sends a default request."""
    client = DocumentSchemaServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    stub_type = type(client.transport.create_document_schema)
    with mock.patch.object(stub_type, '__call__') as call:
        # Fake an awaitable unary-unary call returning a populated schema.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcc_document_schema.DocumentSchema(
                name='name_value',
                display_name='display_name_value',
                document_is_folder=True,
                description='description_value',
            )
        )
        await client.create_document_schema(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.CreateDocumentSchemaRequest()
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
@pytest.mark.asyncio
async def test_update_document_schema_empty_call_grpc_asyncio():
    """Async update_document_schema(request=None) sends a default request."""
    client = DocumentSchemaServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    stub_type = type(client.transport.update_document_schema)
    with mock.patch.object(stub_type, '__call__') as call:
        # Fake an awaitable unary-unary call returning a populated schema.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            gcc_document_schema.DocumentSchema(
                name='name_value',
                display_name='display_name_value',
                document_is_folder=True,
                description='description_value',
            )
        )
        await client.update_document_schema(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.UpdateDocumentSchemaRequest()
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
@pytest.mark.asyncio
async def test_get_document_schema_empty_call_grpc_asyncio():
    """Async get_document_schema(request=None) sends a default request."""
    client = DocumentSchemaServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    stub_type = type(client.transport.get_document_schema)
    with mock.patch.object(stub_type, '__call__') as call:
        # Fake an awaitable unary-unary call returning a populated schema.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document_schema.DocumentSchema(
                name='name_value',
                display_name='display_name_value',
                document_is_folder=True,
                description='description_value',
            )
        )
        await client.get_document_schema(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.GetDocumentSchemaRequest()
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
@pytest.mark.asyncio
async def test_delete_document_schema_empty_call_grpc_asyncio():
    """Async delete_document_schema(request=None) sends a default request."""
    client = DocumentSchemaServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    stub_type = type(client.transport.delete_document_schema)
    with mock.patch.object(stub_type, '__call__') as call:
        # Delete returns Empty, so the fake call resolves to None.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_document_schema(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.DeleteDocumentSchemaRequest()
+
+
# Coverage failsafe: a call with request=None and no flattened fields must
# still send the default request message.
@pytest.mark.asyncio
async def test_list_document_schemas_empty_call_grpc_asyncio():
    """Async list_document_schemas(request=None) sends a default request."""
    client = DocumentSchemaServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    stub_type = type(client.transport.list_document_schemas)
    with mock.patch.object(stub_type, '__call__') as call:
        # Fake an awaitable unary-unary call returning a one-page response.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document_schema_service.ListDocumentSchemasResponse(
                next_page_token='next_page_token_value',
            )
        )
        await client.list_document_schemas(request=None)

        # The stub must have been invoked with the default request message.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == document_schema_service.ListDocumentSchemasRequest()
+
+
def test_transport_kind_rest():
    """get_transport_class('rest') yields a transport whose kind is 'rest'."""
    transport_cls = DocumentSchemaServiceClient.get_transport_class("rest")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "rest"
+
+
def test_create_document_schema_rest_bad_request(request_type=document_schema_service.CreateDocumentSchemaRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # A request that satisfies transcoding, so the mocked HTTP layer is reached.
    request = request_type(**{'parent': 'projects/sample1/locations/sample2'})

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        bad_response = mock.Mock()
        bad_response.status_code = 400
        bad_response.request = mock.Mock()
        bad_response.json = mock.Mock(return_value={})
        req.return_value = bad_response
        client.create_document_schema(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_schema_service.CreateDocumentSchemaRequest,
  dict,
])
def test_create_document_schema_rest_call_success(request_type):
    """A well-formed create_document_schema REST call returns the decoded schema.

    Builds a fully-populated document_schema payload, prunes any subfields
    that the installed runtime version of the proto does not know about,
    mocks the HTTP session with a 200 response, and checks the decoded
    response fields.
    """
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request_init["document_schema"] = {'name': 'name_value', 'display_name': 'display_name_value', 'property_definitions': [{'name': 'name_value', 'display_name': 'display_name_value', 'is_repeatable': True, 'is_filterable': True, 'is_searchable': True, 'is_metadata': True, 'is_required': True, 'retrieval_importance': 1, 'integer_type_options': {}, 'float_type_options': {}, 'text_type_options': {}, 'property_type_options': {'property_definitions': {}}, 'enum_type_options': {'possible_values': ['possible_values_value1', 'possible_values_value2'], 'validation_check_disabled': True}, 'date_time_type_options': {}, 'map_type_options': {}, 'timestamp_type_options': {}, 'schema_sources': [{'name': 'name_value', 'processor_type': 'processor_type_value'}]}], 'document_is_folder': True, 'update_time': {'seconds': 751, 'nanos': 543}, 'create_time': {}, 'description': 'description_value'}
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = document_schema_service.CreateDocumentSchemaRequest.meta.fields["document_schema"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else: # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    # (field, subfield) name pairs known to the runtime proto definition.
    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["document_schema"].items(): # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["document_schema"][field])):
                    del request_init["document_schema"][field][i][subfield]
            else:
                del request_init["document_schema"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = gcc_document_schema.DocumentSchema(
              name='name_value',
              display_name='display_name_value',
              document_is_folder=True,
              description='description_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = gcc_document_schema.DocumentSchema.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.create_document_schema(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, gcc_document_schema.DocumentSchema)
    assert response.name == 'name_value'
    assert response.display_name == 'display_name_value'
    assert response.document_is_folder is True
    assert response.description == 'description_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_document_schema_rest_interceptors(null_interceptor):
    """REST interceptor hooks fire exactly once around create_document_schema.

    Runs once with no interceptor and once with the default interceptor;
    in both cases the patched pre/post hooks on the interceptor class must
    each be invoked exactly once.
    """
    transport = transports.DocumentSchemaServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentSchemaServiceRestInterceptor(),
        )
    client = DocumentSchemaServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "post_create_document_schema") as post, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "pre_create_document_schema") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = document_schema_service.CreateDocumentSchemaRequest.pb(document_schema_service.CreateDocumentSchemaRequest())
        # Bypass real transcoding; any routable method/uri/body will do.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = gcc_document_schema.DocumentSchema.to_json(gcc_document_schema.DocumentSchema())
        req.return_value.content = return_value

        request = document_schema_service.CreateDocumentSchemaRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre hook returns (request, metadata); the post hook returns the response.
        pre.return_value = request, metadata
        post.return_value = gcc_document_schema.DocumentSchema()

        client.create_document_schema(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_update_document_schema_rest_bad_request(request_type=document_schema_service.UpdateDocumentSchemaRequest):
    """Verify update_document_schema surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.update_document_schema(request)
+
+
@pytest.mark.parametrize("request_type", [
    document_schema_service.UpdateDocumentSchemaRequest,
    dict,
])
def test_update_document_schema_rest_call_success(request_type):
    """update_document_schema over REST deserializes a 200 payload into a DocumentSchema."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A resource name that satisfies the transcoding rules.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'})

    # Stub the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        expected = gcc_document_schema.DocumentSchema(
            name='name_value',
            display_name='display_name_value',
            document_is_folder=True,
            description='description_value',
        )

        # Serialize the expected message exactly as the server would.
        payload = json_format.MessageToJson(gcc_document_schema.DocumentSchema.pb(expected))

        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = payload.encode('UTF-8')
        req.return_value = http_response

        response = client.update_document_schema(request)

    # The client must parse the payload back into the expected message type.
    assert isinstance(response, gcc_document_schema.DocumentSchema)
    assert response.name == 'name_value'
    assert response.display_name == 'display_name_value'
    assert response.document_is_folder is True
    assert response.description == 'description_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_document_schema_rest_interceptors(null_interceptor):
    """Verify the REST pre/post interceptor hooks fire exactly once around update_document_schema."""
    transport = transports.DocumentSchemaServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentSchemaServiceRestInterceptor(),
        )
    client = DocumentSchemaServiceClient(transport=transport)

    # Patch the HTTP session, the URI transcoder, and both interceptor hooks so
    # the call never leaves the process.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "post_update_document_schema") as post, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "pre_update_document_schema") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = document_schema_service.UpdateDocumentSchemaRequest.pb(document_schema_service.UpdateDocumentSchemaRequest())
        # Canned transcoding output so the transport builds an HTTP request
        # without exercising real URI routing.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a 200 response whose body is an empty serialized DocumentSchema.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = gcc_document_schema.DocumentSchema.to_json(gcc_document_schema.DocumentSchema())
        req.return_value.content = return_value

        request = document_schema_service.UpdateDocumentSchemaRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre-hook must return (request, metadata); the post-hook returns the response.
        pre.return_value = request, metadata
        post.return_value = gcc_document_schema.DocumentSchema()

        client.update_document_schema(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_document_schema_rest_bad_request(request_type=document_schema_service.GetDocumentSchemaRequest):
    """Verify get_document_schema surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.get_document_schema(request)
+
+
@pytest.mark.parametrize("request_type", [
    document_schema_service.GetDocumentSchemaRequest,
    dict,
])
def test_get_document_schema_rest_call_success(request_type):
    """get_document_schema over REST deserializes a 200 payload into a DocumentSchema."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A resource name that satisfies the transcoding rules.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'})

    # Stub the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        expected = document_schema.DocumentSchema(
            name='name_value',
            display_name='display_name_value',
            document_is_folder=True,
            description='description_value',
        )

        # Serialize the expected message exactly as the server would.
        payload = json_format.MessageToJson(document_schema.DocumentSchema.pb(expected))

        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = payload.encode('UTF-8')
        req.return_value = http_response

        response = client.get_document_schema(request)

    # The client must parse the payload back into the expected message type.
    assert isinstance(response, document_schema.DocumentSchema)
    assert response.name == 'name_value'
    assert response.display_name == 'display_name_value'
    assert response.document_is_folder is True
    assert response.description == 'description_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_document_schema_rest_interceptors(null_interceptor):
    """Verify the REST pre/post interceptor hooks fire exactly once around get_document_schema."""
    transport = transports.DocumentSchemaServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentSchemaServiceRestInterceptor(),
        )
    client = DocumentSchemaServiceClient(transport=transport)

    # Patch the HTTP session, the URI transcoder, and both interceptor hooks so
    # the call never leaves the process.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "post_get_document_schema") as post, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "pre_get_document_schema") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = document_schema_service.GetDocumentSchemaRequest.pb(document_schema_service.GetDocumentSchemaRequest())
        # Canned transcoding output so the transport builds an HTTP request
        # without exercising real URI routing.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a 200 response whose body is an empty serialized DocumentSchema.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = document_schema.DocumentSchema.to_json(document_schema.DocumentSchema())
        req.return_value.content = return_value

        request = document_schema_service.GetDocumentSchemaRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre-hook must return (request, metadata); the post-hook returns the response.
        pre.return_value = request, metadata
        post.return_value = document_schema.DocumentSchema()

        client.get_document_schema(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_delete_document_schema_rest_bad_request(request_type=document_schema_service.DeleteDocumentSchemaRequest):
    """Verify delete_document_schema surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.delete_document_schema(request)
+
+
@pytest.mark.parametrize("request_type", [
    document_schema_service.DeleteDocumentSchemaRequest,
    dict,
])
def test_delete_document_schema_rest_call_success(request_type):
    """delete_document_schema over REST returns None on an empty 200 response."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A resource name that satisfies the transcoding rules.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/documentSchemas/sample3'})

    # Stub the underlying HTTP session; delete returns an empty body.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = b''
        req.return_value = http_response

        response = client.delete_document_schema(request)

    # Deleting a schema yields no payload.
    assert response is None
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_document_schema_rest_interceptors(null_interceptor):
    """Verify the REST pre interceptor hook fires exactly once around delete_document_schema.

    There is no post hook here because delete returns an empty response.
    """
    transport = transports.DocumentSchemaServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentSchemaServiceRestInterceptor(),
        )
    client = DocumentSchemaServiceClient(transport=transport)

    # Patch the HTTP session, the URI transcoder, and the pre-hook so the call
    # never leaves the process.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "pre_delete_document_schema") as pre:
        pre.assert_not_called()
        pb_message = document_schema_service.DeleteDocumentSchemaRequest.pb(document_schema_service.DeleteDocumentSchemaRequest())
        # Canned transcoding output so the transport builds an HTTP request
        # without exercising real URI routing.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a bare 200 response (no body expected for delete).
        req.return_value = mock.Mock()
        req.return_value.status_code = 200

        request = document_schema_service.DeleteDocumentSchemaRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre-hook must return (request, metadata).
        pre.return_value = request, metadata

        client.delete_document_schema(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
+
+
def test_list_document_schemas_rest_bad_request(request_type=document_schema_service.ListDocumentSchemasRequest):
    """Verify list_document_schemas surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.list_document_schemas(request)
+
+
@pytest.mark.parametrize("request_type", [
    document_schema_service.ListDocumentSchemasRequest,
    dict,
])
def test_list_document_schemas_rest_call_success(request_type):
    """list_document_schemas over REST wraps a 200 payload in a ListDocumentSchemasPager."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A parent that satisfies the transcoding rules.
    request = request_type(**{'parent': 'projects/sample1/locations/sample2'})

    # Stub the underlying HTTP session and hand back a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        expected = document_schema_service.ListDocumentSchemasResponse(
            next_page_token='next_page_token_value',
        )

        # Serialize the expected message exactly as the server would.
        payload = json_format.MessageToJson(document_schema_service.ListDocumentSchemasResponse.pb(expected))

        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = payload.encode('UTF-8')
        req.return_value = http_response

        response = client.list_document_schemas(request)

    # The list call is paginated: the raw response is wrapped in a pager.
    assert isinstance(response, pagers.ListDocumentSchemasPager)
    assert response.next_page_token == 'next_page_token_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_document_schemas_rest_interceptors(null_interceptor):
    """Verify the REST pre/post interceptor hooks fire exactly once around list_document_schemas."""
    transport = transports.DocumentSchemaServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentSchemaServiceRestInterceptor(),
        )
    client = DocumentSchemaServiceClient(transport=transport)

    # Patch the HTTP session, the URI transcoder, and both interceptor hooks so
    # the call never leaves the process.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "post_list_document_schemas") as post, \
        mock.patch.object(transports.DocumentSchemaServiceRestInterceptor, "pre_list_document_schemas") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = document_schema_service.ListDocumentSchemasRequest.pb(document_schema_service.ListDocumentSchemasRequest())
        # Canned transcoding output so the transport builds an HTTP request
        # without exercising real URI routing.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a 200 response whose body is an empty serialized list response.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = document_schema_service.ListDocumentSchemasResponse.to_json(document_schema_service.ListDocumentSchemasResponse())
        req.return_value.content = return_value

        request = document_schema_service.ListDocumentSchemasRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre-hook must return (request, metadata); the post-hook returns the response.
        pre.return_value = request, metadata
        post.return_value = document_schema_service.ListDocumentSchemasResponse()

        client.list_document_schemas(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
    """Verify get_operation surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_operation(request)
+
+
@pytest.mark.parametrize("request_type", [
    operations_pb2.GetOperationRequest,
    dict,
])
def test_get_operation_rest(request_type):
    """get_operation over REST parses the HTTP payload into an Operation."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request = request_type(**{'name': 'projects/sample1/locations/sample2/operations/sample3'})

    # Fake a 200 response whose body is an empty serialized Operation.
    with mock.patch.object(Session, 'request') as req:
        payload = json_format.MessageToJson(operations_pb2.Operation())

        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = payload.encode('UTF-8')
        req.return_value = http_response

        response = client.get_operation(request)

    assert isinstance(response, operations_pb2.Operation)
+
def test_initialize_client_w_rest():
    """Constructing a client with the REST transport succeeds."""
    assert DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    ) is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_create_document_schema_empty_call_rest():
    """A request=None create_document_schema() call sends a default-constructed request."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Replace the transport-level callable, then invoke with no request at all.
    with mock.patch.object(
            type(client.transport.create_document_schema),
            '__call__') as call:
        client.create_document_schema(request=None)

        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The client must have substituted an empty request message.
        assert args[0] == document_schema_service.CreateDocumentSchemaRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_update_document_schema_empty_call_rest():
    """A request=None update_document_schema() call sends a default-constructed request."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Replace the transport-level callable, then invoke with no request at all.
    with mock.patch.object(
            type(client.transport.update_document_schema),
            '__call__') as call:
        client.update_document_schema(request=None)

        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The client must have substituted an empty request message.
        assert args[0] == document_schema_service.UpdateDocumentSchemaRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_get_document_schema_empty_call_rest():
    """A request=None get_document_schema() call sends a default-constructed request."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Replace the transport-level callable, then invoke with no request at all.
    with mock.patch.object(
            type(client.transport.get_document_schema),
            '__call__') as call:
        client.get_document_schema(request=None)

        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The client must have substituted an empty request message.
        assert args[0] == document_schema_service.GetDocumentSchemaRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_delete_document_schema_empty_call_rest():
    """A request=None delete_document_schema() call sends a default-constructed request."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Replace the transport-level callable, then invoke with no request at all.
    with mock.patch.object(
            type(client.transport.delete_document_schema),
            '__call__') as call:
        client.delete_document_schema(request=None)

        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The client must have substituted an empty request message.
        assert args[0] == document_schema_service.DeleteDocumentSchemaRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_list_document_schemas_empty_call_rest():
    """A request=None list_document_schemas() call sends a default-constructed request."""
    client = DocumentSchemaServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Replace the transport-level callable, then invoke with no request at all.
    with mock.patch.object(
            type(client.transport.list_document_schemas),
            '__call__') as call:
        client.list_document_schemas(request=None)

        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The client must have substituted an empty request message.
        assert args[0] == document_schema_service.ListDocumentSchemasRequest()
+
+
def test_transport_grpc_default():
    """With no transport argument, the client falls back to gRPC."""
    client = DocumentSchemaServiceClient(credentials=ga_credentials.AnonymousCredentials())
    assert isinstance(client.transport, transports.DocumentSchemaServiceGrpcTransport)
+
def test_document_schema_service_base_transport_error():
    """Passing both a credentials object and credentials_file must raise DuplicateCredentialArgs."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.DocumentSchemaServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )
+
+
def test_document_schema_service_base_transport():
    """Every RPC method and abstract property on the base transport raises NotImplementedError."""
    # Instantiate the base transport with its __init__ stubbed out.
    with mock.patch('google.cloud.contentwarehouse_v1.services.document_schema_service.transports.DocumentSchemaServiceTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.DocumentSchemaServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every RPC method should just blindly raise NotImplementedError.
    for method in (
        'create_document_schema',
        'update_document_schema',
        'get_document_schema',
        'delete_document_schema',
        'list_document_schemas',
        'get_operation',
    ):
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch-all for the remaining abstract properties.
    for attr in ('kind',):
        with pytest.raises(NotImplementedError):
            getattr(transport, attr)()
+
+
def test_document_schema_service_base_transport_with_credentials_file():
    """A credentials file is loaded via google.auth with the service's default scopes."""
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, \
            mock.patch('google.cloud.contentwarehouse_v1.services.document_schema_service.transports.DocumentSchemaServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.DocumentSchemaServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        # The file and quota project are forwarded verbatim; the cloud-platform
        # scope is supplied as the default.
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
def test_document_schema_service_base_transport_with_adc():
    """When neither credentials nor a credentials file is given, ADC is consulted."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, \
            mock.patch('google.cloud.contentwarehouse_v1.services.document_schema_service.transports.DocumentSchemaServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.DocumentSchemaServiceTransport()
        adc.assert_called_once()
+
+
def test_document_schema_service_auth_adc():
    """Client construction without credentials falls back to ADC with the default scopes."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        DocumentSchemaServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id=None,
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentSchemaServiceGrpcTransport,
        transports.DocumentSchemaServiceGrpcAsyncIOTransport,
    ],
)
def test_document_schema_service_transport_auth_adc(transport_class):
    """gRPC transports use ADC, forwarding explicit scopes and quota project verbatim."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentSchemaServiceGrpcTransport,
        transports.DocumentSchemaServiceGrpcAsyncIOTransport,
        transports.DocumentSchemaServiceRestTransport,
    ],
)
def test_document_schema_service_transport_auth_gdch_credentials(transport_class):
    """GDC-H credentials get their audience set to api_audience, defaulting to the host."""
    host = 'https://language.com'
    cases = [
        (None, host),  # no explicit audience -> fall back to the host
        ('https://language2.com', 'https://language2.com'),
    ]
    for audience_arg, expected_audience in cases:
        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
            gdch_mock = mock.MagicMock()
            # with_gdch_audience returns the same mock so the transport can use it.
            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
            adc.return_value = (gdch_mock, None)
            transport_class(host=host, api_audience=audience_arg)
            gdch_mock.with_gdch_audience.assert_called_once_with(expected_audience)
+
+
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.DocumentSchemaServiceGrpcTransport, grpc_helpers),
        (transports.DocumentSchemaServiceGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_document_schema_service_transport_create_channel(transport_class, grpc_helpers):
    """gRPC transports create their channel via grpc_helpers with ADC and default options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        # The channel must target the default host with the ADC credentials and
        # forward the caller-supplied scopes/quota project verbatim.
        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=["1", "2"],
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("transport_class", [transports.DocumentSchemaServiceGrpcTransport, transports.DocumentSchemaServiceGrpcAsyncIOTransport])
def test_document_schema_service_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """gRPC transports honour both mTLS configuration styles.

    Phase 1: an explicit ssl_channel_credentials object is passed straight
    through to create_channel. Phase 2: with no explicit credentials, the
    client_cert_source_for_mtls callback is used to build them.
    """
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            # The cert/key pair produced by the callback must be fed to
            # grpc.ssl_channel_credentials.
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )
+
+def test_document_schema_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.DocumentSchemaServiceRestTransport (
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_document_schema_service_host_no_port(transport_name):
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com'),
+         transport=transport_name,
+    )
+    assert client.transport._host == (
+        'contentwarehouse.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://contentwarehouse.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_document_schema_service_host_with_port(transport_name):
+    """Verify an explicit port in the endpoint override is preserved."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'contentwarehouse.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://contentwarehouse.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_document_schema_service_client_transport_session_collision(transport_name):
+    """Verify two REST clients never share a per-method HTTP session."""
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DocumentSchemaServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DocumentSchemaServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    # Each RPC method on each client must own a distinct session object.
+    session1 = client1.transport.create_document_schema._session
+    session2 = client2.transport.create_document_schema._session
+    assert session1 != session2
+    session1 = client1.transport.update_document_schema._session
+    session2 = client2.transport.update_document_schema._session
+    assert session1 != session2
+    session1 = client1.transport.get_document_schema._session
+    session2 = client2.transport.get_document_schema._session
+    assert session1 != session2
+    session1 = client1.transport.delete_document_schema._session
+    session2 = client2.transport.delete_document_schema._session
+    assert session1 != session2
+    session1 = client1.transport.list_document_schemas._session
+    session2 = client2.transport.list_document_schemas._session
+    assert session1 != session2
+def test_document_schema_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DocumentSchemaServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+def test_document_schema_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DocumentSchemaServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DocumentSchemaServiceGrpcTransport, transports.DocumentSchemaServiceGrpcAsyncIOTransport])
+def test_document_schema_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    """Verify deprecated mTLS args build ssl creds from the cert-source callback."""
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            # The deprecated arguments must still work, but warn.
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            # cert/key bytes come from client_cert_source_callback().
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DocumentSchemaServiceGrpcTransport, transports.DocumentSchemaServiceGrpcAsyncIOTransport])
+def test_document_schema_service_transport_channel_mtls_with_adc(
+    transport_class
+):
+    """Verify mTLS falls back to ADC-derived SSL creds when no cert source is given."""
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            # api_mtls_endpoint without client_cert_source still warns.
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_document_schema_path():
+    project = "squid"
+    location = "clam"
+    document_schema = "whelk"
+    expected = "projects/{project}/locations/{location}/documentSchemas/{document_schema}".format(project=project, location=location, document_schema=document_schema, )
+    actual = DocumentSchemaServiceClient.document_schema_path(project, location, document_schema)
+    assert expected == actual
+
+
+def test_parse_document_schema_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "document_schema": "nudibranch",
+    }
+    path = DocumentSchemaServiceClient.document_schema_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentSchemaServiceClient.parse_document_schema_path(path)
+    assert expected == actual
+
+def test_location_path():
+    project = "cuttlefish"
+    location = "mussel"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = DocumentSchemaServiceClient.location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_location_path():
+    expected = {
+        "project": "winkle",
+        "location": "nautilus",
+    }
+    path = DocumentSchemaServiceClient.location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentSchemaServiceClient.parse_location_path(path)
+    assert expected == actual
+
+def test_common_billing_account_path():
+    billing_account = "scallop"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = DocumentSchemaServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "abalone",
+    }
+    path = DocumentSchemaServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentSchemaServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    folder = "squid"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = DocumentSchemaServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "clam",
+    }
+    path = DocumentSchemaServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentSchemaServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "whelk"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = DocumentSchemaServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "octopus",
+    }
+    path = DocumentSchemaServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentSchemaServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "oyster"
+    expected = "projects/{project}".format(project=project, )
+    actual = DocumentSchemaServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "nudibranch",
+    }
+    path = DocumentSchemaServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentSchemaServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "cuttlefish"
+    location = "mussel"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = DocumentSchemaServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "winkle",
+        "location": "nautilus",
+    }
+    path = DocumentSchemaServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DocumentSchemaServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    """Verify client_info is forwarded to _prep_wrapped_messages for client and transport."""
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.DocumentSchemaServiceTransport, '_prep_wrapped_messages') as prep:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.DocumentSchemaServiceTransport, '_prep_wrapped_messages') as prep:
+        transport_class = DocumentSchemaServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_get_operation(transport: str = "grpc"):
+    """Verify get_operation forwards the request and returns an Operation."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+@pytest.mark.asyncio
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
+    """Async variant: verify get_operation forwards the request and returns an Operation."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+def test_get_operation_field_headers():
+    """Verify the routing header x-goog-request-params carries the resource name."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_operation_field_headers_async():
+    """Async variant: verify the x-goog-request-params routing header is sent."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_get_operation_from_dict():
+    """Verify get_operation accepts a plain dict in place of a request proto."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+
+        response = client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_operation_from_dict_async():
+    """Async variant: verify get_operation accepts a plain dict request."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    """Verify exiting the client context closes the gRPC channel exactly once."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    """Verify exiting the async client context closes the gRPC channel exactly once."""
+    client = DocumentSchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    """Verify exiting the client context closes the REST session exactly once."""
+    client = DocumentSchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = DocumentSchemaServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+@pytest.mark.parametrize("client_class,transport_class", [
+    (DocumentSchemaServiceClient, transports.DocumentSchemaServiceGrpcTransport),
+    (DocumentSchemaServiceAsyncClient, transports.DocumentSchemaServiceGrpcAsyncIOTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+    """Verify an api_key client option is exchanged for API-key credentials."""
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            # The transport must be built with the derived credentials.
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_service.py
new file mode 100644
index 000000000000..d8899dbfeaf9
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_document_service.py
@@ -0,0 +1,7404 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.contentwarehouse_v1.services.document_service import DocumentServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.document_service import DocumentServiceClient
+from google.cloud.contentwarehouse_v1.services.document_service import pagers
+from google.cloud.contentwarehouse_v1.services.document_service import transports
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import document as gcc_document
+from google.cloud.contentwarehouse_v1.types import document_service
+from google.cloud.contentwarehouse_v1.types import document_service_request
+from google.cloud.contentwarehouse_v1.types import filters
+from google.cloud.contentwarehouse_v1.types import histogram
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.documentai_v1.types import barcode
+from google.cloud.documentai_v1.types import document as gcd_document
+from google.cloud.documentai_v1.types import geometry
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import any_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.protobuf import wrappers_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+from google.type import color_pb2  # type: ignore
+from google.type import date_pb2  # type: ignore
+from google.type import datetime_pb2  # type: ignore
+from google.type import expr_pb2  # type: ignore
+from google.type import interval_pb2  # type: ignore
+from google.type import money_pb2  # type: ignore
+from google.type import postal_address_pb2  # type: ignore
+import google.auth
+
+
+async def mock_async_gen(data, chunk_size=1):
+    for i in range(0, len(data)):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    """Return async anonymous credentials when google.auth.aio is available, else sync ones."""
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
+
+
+def test__get_default_mtls_endpoint():
+    """Verify _get_default_mtls_endpoint rewrites googleapis hosts to their mtls form."""
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    # None passes through; already-mtls endpoints are idempotent;
+    # non-googleapis hosts are returned unchanged.
+    assert DocumentServiceClient._get_default_mtls_endpoint(None) is None
+    assert DocumentServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert DocumentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert DocumentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert DocumentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert DocumentServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+def test__read_environment_variables():
+    """Verify env vars map to the (use_client_cert, mtls_endpoint_policy, universe) tuple."""
+    assert DocumentServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        assert DocumentServiceClient._read_environment_variables() == (True, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        assert DocumentServiceClient._read_environment_variables() == (False, "auto", None)
+
+    # Anything other than true/false is rejected.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            DocumentServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        assert DocumentServiceClient._read_environment_variables() == (False, "never", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        assert DocumentServiceClient._read_environment_variables() == (False, "always", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+        assert DocumentServiceClient._read_environment_variables() == (False, "auto", None)
+
+    # Only never/auto/always are valid mTLS endpoint policies.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            DocumentServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+        assert DocumentServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
+def test__get_client_cert_source():
+    """Verify _get_client_cert_source honors the use_cert flag and ADC defaults."""
+    mock_provided_cert_source = mock.Mock()
+    mock_default_cert_source = mock.Mock()
+
+    # When use_cert is falsy, no cert source is returned even if one was provided.
+    assert DocumentServiceClient._get_client_cert_source(None, False) is None
+    assert DocumentServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
+    assert DocumentServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
+
+    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
+            # Falls back to the ADC default cert source; a provided source wins.
+            # NOTE(review): "true" (string) relies on truthiness, not the bool True.
+            assert DocumentServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
+            assert DocumentServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
+
@mock.patch.object(DocumentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentServiceClient))
@mock.patch.object(DocumentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentServiceAsyncClient))
def test__get_api_endpoint():
    """Exercise _get_api_endpoint across override / cert / universe / mTLS-env combinations."""
    endpoint_for = DocumentServiceClient._get_api_endpoint
    override = "foo.com"
    cert_source = mock.Mock()
    gdu = DocumentServiceClient._DEFAULT_UNIVERSE
    gdu_endpoint = DocumentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=gdu)
    other_universe = "bar.com"
    other_endpoint = DocumentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=other_universe)
    mtls_endpoint = DocumentServiceClient.DEFAULT_MTLS_ENDPOINT

    # An explicit api_endpoint override always wins.
    assert endpoint_for(override, cert_source, gdu, "always") == override
    # "auto" selects the mTLS endpoint only when a client cert is present.
    assert endpoint_for(None, cert_source, gdu, "auto") == mtls_endpoint
    assert endpoint_for(None, None, gdu, "auto") == gdu_endpoint
    # "always" forces the mTLS endpoint, cert or no cert.
    assert endpoint_for(None, None, gdu, "always") == mtls_endpoint
    assert endpoint_for(None, cert_source, gdu, "always") == mtls_endpoint
    # "never" yields the plain endpoint for whichever universe is configured.
    assert endpoint_for(None, None, other_universe, "never") == other_endpoint
    assert endpoint_for(None, None, gdu, "never") == gdu_endpoint

    # mTLS is only supported in the default (googleapis.com) universe.
    with pytest.raises(MutualTLSChannelError) as excinfo:
        endpoint_for(None, cert_source, other_universe, "auto")
    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
def test__get_universe_domain():
    """Verify precedence: client option > environment variable > default universe."""
    from_client = "foo.com"
    from_env = "bar.com"
    resolve = DocumentServiceClient._get_universe_domain

    assert resolve(from_client, from_env) == from_client
    assert resolve(None, from_env) == from_env
    assert resolve(None, None) == DocumentServiceClient._DEFAULT_UNIVERSE

    # An explicitly empty universe domain is rejected outright.
    with pytest.raises(ValueError) as excinfo:
        resolve("", None)
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc"),
    (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest"),
])
def test__validate_universe_domain(client_class, transport_class, transport_name):
    """Validate client-vs-credential universe-domain checking across transports.

    Covers: repeat validation, channel-based credentials, missing credentials,
    and mismatches originating from either the credentials or client options.
    """
    client = client_class(
        transport=transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
    )
    # Plain assert instead of `== True` (flake8 E712); the method returns a bool.
    assert client._validate_universe_domain()

    # Test the case when universe is already validated.
    assert client._validate_universe_domain()

    if transport_name == "grpc":
        # Test the case where credentials are provided by the
        # `local_channel_credentials`. The default universes in both match.
        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
        client = client_class(transport=transport_class(channel=channel))
        assert client._validate_universe_domain()

        # Test the case where credentials do not exist: e.g. a transport is provided
        # with no credentials. Validation should still succeed because there is no
        # mismatch with non-existent credentials.
        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
        transport = transport_class(channel=channel)
        transport._credentials = None
        client = client_class(transport=transport)
        assert client._validate_universe_domain()

    # TODO: This is needed to cater for older versions of google-auth
    # Make this test unconditional once the minimum supported version of
    # google-auth becomes 2.23.0 or higher.
    google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]]
    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
        credentials = ga_credentials.AnonymousCredentials()
        credentials._universe_domain = "foo.com"
        # Test the case when there is a universe mismatch from the credentials.
        client = client_class(
            transport=transport_class(credentials=credentials)
        )
        with pytest.raises(ValueError) as excinfo:
            client._validate_universe_domain()
        assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."

        # Test the case when there is a universe mismatch from the client.
        #
        # TODO: Make this test unconditional once the minimum supported version of
        # google-api-core becomes 2.15.0 or higher.
        api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]]
        if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
            client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),))
            with pytest.raises(ValueError) as excinfo:
                client._validate_universe_domain()
            assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."

    # _compare_universes raises ValueError when credentials are None.
    with pytest.raises(ValueError):
        client._compare_universes("foo.bar", None)
+
+
@pytest.mark.parametrize("client_class,transport_name", [
    (DocumentServiceClient, "grpc"),
    (DocumentServiceAsyncClient, "grpc_asyncio"),
    (DocumentServiceClient, "rest"),
])
def test_document_service_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info must wire the factory-produced credentials into the transport."""
    anon_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = anon_creds
        client = client_class.from_service_account_info({"valid": True}, transport=transport_name)
        assert isinstance(client, client_class)
        assert client.transport._credentials == anon_creds

        # gRPC transports carry host:port; the REST transport carries a full URL.
        expected_host = (
            'contentwarehouse.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else 'https://contentwarehouse.googleapis.com'
        )
        assert client.transport._host == expected_host
+
+
@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.DocumentServiceGrpcTransport, "grpc"),
    (transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (transports.DocumentServiceRestTransport, "rest"),
])
def test_document_service_client_service_account_always_use_jwt(transport_class, transport_name):
    """Self-signed JWT must be requested iff always_use_jwt_access=True."""
    for jwt_flag in (True, False):
        with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
            sa_creds = service_account.Credentials(None, None, None)
            transport_class(credentials=sa_creds, always_use_jwt_access=jwt_flag)
            if jwt_flag:
                use_jwt.assert_called_once_with(True)
            else:
                use_jwt.assert_not_called()
+
+
@pytest.mark.parametrize("client_class,transport_name", [
    (DocumentServiceClient, "grpc"),
    (DocumentServiceAsyncClient, "grpc_asyncio"),
    (DocumentServiceClient, "rest"),
])
def test_document_service_client_from_service_account_file(client_class, transport_name):
    """Both from_service_account_file and its _json alias must use the factory credentials."""
    anon_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = anon_creds
        # The _json constructor is an alias of the _file constructor; exercise both.
        for constructor in (client_class.from_service_account_file, client_class.from_service_account_json):
            client = constructor("dummy/file/path.json", transport=transport_name)
            assert isinstance(client, client_class)
            assert client.transport._credentials == anon_creds

        # gRPC transports carry host:port; the REST transport carries a full URL.
        expected_host = (
            'contentwarehouse.googleapis.com:443'
            if transport_name in ['grpc', 'grpc_asyncio']
            else 'https://contentwarehouse.googleapis.com'
        )
        assert client.transport._host == expected_host
+
+
def test_document_service_client_get_transport_class():
    """get_transport_class returns a sync transport; "grpc" maps to the gRPC transport."""
    default_transport = DocumentServiceClient.get_transport_class()
    assert default_transport in (
        transports.DocumentServiceGrpcTransport,
        transports.DocumentServiceRestTransport,
    )

    grpc_transport = DocumentServiceClient.get_transport_class("grpc")
    assert grpc_transport == transports.DocumentServiceGrpcTransport
+
+
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc"),
    (DocumentServiceAsyncClient, transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest"),
])
@mock.patch.object(DocumentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentServiceClient))
@mock.patch.object(DocumentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentServiceAsyncClient))
def test_document_service_client_client_options(client_class, transport_class, transport_name):
    """Check how client_options and mTLS env vars shape transport construction.

    Patches the transport's ``__init__`` so each scenario can assert on the
    exact keyword arguments the client forwards (host, scopes, quota_project_id,
    api_audience, ...).
    """
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(DocumentServiceClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(DocumentServiceClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client = client_class(transport=transport_name)
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client = client_class(transport=transport_name)
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_audience is provided (this section sets api_audience,
    # not api_endpoint).
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc", "true"),
    (DocumentServiceAsyncClient, transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc", "false"),
    (DocumentServiceAsyncClient, transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
    (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest", "true"),
    (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest", "false"),
])
@mock.patch.object(DocumentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentServiceClient))
@mock.patch.object(DocumentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentServiceAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_document_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """Check endpoint/cert selection under GOOGLE_API_USE_MTLS_ENDPOINT="auto".

    Three scenarios: an explicit client_cert_source, an ADC-provided default
    cert, and no cert at all — each crossed with GOOGLE_API_USE_CLIENT_CERTIFICATE
    set to "true" or "false" via the parametrization.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # NOTE: `client` below is still the instance from the previous
                    # section; it is only used to read endpoint constants before a
                    # new client is constructed.
                    if use_client_cert_env == "false":
                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )
+
+
@pytest.mark.parametrize("client_class", [
    DocumentServiceClient, DocumentServiceAsyncClient
])
@mock.patch.object(DocumentServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DocumentServiceClient))
@mock.patch.object(DocumentServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DocumentServiceAsyncClient))
def test_document_service_client_get_mtls_endpoint_and_cert_source(client_class):
    """Check get_mtls_endpoint_and_cert_source across env-var combinations.

    Exercises both GOOGLE_API_USE_CLIENT_CERTIFICATE and
    GOOGLE_API_USE_MTLS_ENDPOINT, including the unsupported-value error paths.
    """
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
@pytest.mark.parametrize("client_class", [
    DocumentServiceClient, DocumentServiceAsyncClient
])
@mock.patch.object(DocumentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentServiceClient))
@mock.patch.object(DocumentServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DocumentServiceAsyncClient))
def test_document_service_client_client_api_endpoint(client_class):
    """Check the client's resolved api_endpoint under overrides, env vars and universes.

    Uses hasattr/delattr probing so the test works with ClientOptions versions
    that may not expose a `universe_domain` attribute.
    """
    mock_client_cert_source = client_cert_source_callback
    api_override = "foo.com"
    default_universe = DocumentServiceClient._DEFAULT_UNIVERSE
    default_endpoint = DocumentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = DocumentServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
    # use ClientOptions.api_endpoint as the api endpoint regardless.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
            assert client.api_endpoint == api_override

    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint

    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT

    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
    # and ClientOptions.universe_domain="bar.com",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
    options = client_options.ClientOptions()
    universe_exists = hasattr(options, "universe_domain")
    if universe_exists:
        options = client_options.ClientOptions(universe_domain=mock_universe)
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    else:
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
    assert client.universe_domain == (mock_universe if universe_exists else default_universe)

    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
    options = client_options.ClientOptions()
    if hasattr(options, "universe_domain"):
        delattr(options, "universe_domain")
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint
+
+
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc"),
    (DocumentServiceAsyncClient, transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest"),
])
def test_document_service_client_client_options_scopes(client_class, transport_class, transport_name):
    """OAuth scopes supplied via ClientOptions must reach the transport unchanged."""
    scoped_options = client_options.ClientOptions(scopes=["1", "2"])
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=scoped_options, transport=transport_name)
        expected_kwargs = dict(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
        patched.assert_called_once_with(**expected_kwargs)
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc", grpc_helpers),
    (DocumentServiceAsyncClient, transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
    (DocumentServiceClient, transports.DocumentServiceRestTransport, "rest", None),
])
def test_document_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """A credentials_file in ClientOptions must be forwarded to the transport verbatim."""
    file_options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=file_options, transport=transport_name)
        expected_kwargs = dict(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
        patched.assert_called_once_with(**expected_kwargs)
+
def test_document_service_client_client_options_from_dict():
    """A plain dict passed as client_options must behave like a ClientOptions object."""
    transport_path = 'google.cloud.contentwarehouse_v1.services.document_service.transports.DocumentServiceGrpcTransport.__init__'
    with mock.patch(transport_path) as grpc_transport:
        grpc_transport.return_value = None
        DocumentServiceClient(client_options={'api_endpoint': 'squid.clam.whelk'})
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
+
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (DocumentServiceClient, transports.DocumentServiceGrpcTransport, "grpc", grpc_helpers),
    (DocumentServiceAsyncClient, transports.DocumentServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
])
def test_document_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """credentials_file reaches the transport, and the loaded creds feed create_channel."""
    opts = client_options.ClientOptions(credentials_file="credentials.json")

    # First: the transport constructor must receive the file path untouched.
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=opts, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Second: credentials loaded from the file are what the channel is built with.
    with mock.patch.object(google.auth, "load_credentials_from_file", autospec=True) as load_creds, \
            mock.patch.object(google.auth, "default", autospec=True) as adc, \
            mock.patch.object(grpc_helpers, "create_channel") as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client_class(client_options=opts, transport=transport_name)
        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
            ),
            scopes=None,
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("request_type", [
    document_service_request.CreateDocumentRequest,
    dict,
])
def test_create_document(request_type, transport: str = 'grpc'):
    """Unary CreateDocument returns a CreateDocumentResponse over the mocked stub."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # proto3 treats every field as optional and the API is mocked out,
    # so an empty request is sufficient.
    request = request_type()

    with mock.patch.object(type(client.transport.create_document), '__call__') as rpc:
        rpc.return_value = document_service.CreateDocumentResponse()
        response = client.create_document(request)

        # Exactly one stub invocation, carrying an (empty) CreateDocumentRequest.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == document_service_request.CreateDocumentRequest()

    # The response surfaces with the expected type.
    assert isinstance(response, document_service.CreateDocumentResponse)
+
+
def test_create_document_non_empty_request_with_auto_populated_field():
    """Coverage failsafe for AIP-4235: non-UUID4 string fields survive a non-empty request."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Set only the non-UUID4 string fields by hand; UUID4 fields, if any,
    # must be auto-populated per AIP 4235.
    request = document_service_request.CreateDocumentRequest(parent='parent_value')

    with mock.patch.object(type(client.transport.create_document), '__call__') as rpc:
        rpc.return_value.name = "foo"  # compute clients read operation as a string
        client.create_document(request=request)
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == document_service_request.CreateDocumentRequest(
            parent='parent_value',
        )
+
def test_create_document_use_cached_wrapped_rpc():
    """Wrapped RPCs are built once by _prep_wrapped_messages and reused on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Construction wraps all methods up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be present in the transport's cache.
        assert client._transport.create_document in client._transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations become observable.
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # compute clients read operation as a string
        client._transport._wrapped_methods[client._transport.create_document] = mock_rpc

        request = {}
        client.create_document(request)
        assert mock_rpc.call_count == 1

        client.create_document(request)
        # No re-wrapping occurred; the cached mock absorbed both calls.
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_create_document_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async clients also build wrapped RPCs once and reuse them per call."""
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = DocumentServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Construction wraps all methods up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be present in the inner transport's cache.
        assert client._client._transport.create_document in client._client._transport._wrapped_methods

        # Swap the cached wrapper for an AsyncMock so invocations become observable.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.create_document] = mock_rpc

        request = {}
        await client.create_document(request)
        assert mock_rpc.call_count == 1

        await client.create_document(request)
        # No re-wrapping occurred; the cached mock absorbed both calls.
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_create_document_async(transport: str = 'grpc_asyncio', request_type=document_service_request.CreateDocumentRequest):
    """Async CreateDocument resolves to a CreateDocumentResponse."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # proto3 treats every field as optional and the API is mocked out,
    # so an empty request is sufficient.
    request = request_type()

    with mock.patch.object(type(client.transport.create_document), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document_service.CreateDocumentResponse())
        response = await client.create_document(request)

        # The stub was invoked with an (empty) CreateDocumentRequest.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == document_service_request.CreateDocumentRequest()

    # The awaited response surfaces with the expected type.
    assert isinstance(response, document_service.CreateDocumentResponse)
+
+
@pytest.mark.asyncio
async def test_create_document_async_from_dict():
    """Dict requests follow the same async path as proto requests."""
    await test_create_document_async(request_type=dict)
+
def test_create_document_field_headers():
    """URI-bound request fields are mirrored into the x-goog-request-params header."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI must also be sent as a header.
    request = document_service_request.CreateDocumentRequest()
    request.parent = 'parent_value'

    with mock.patch.object(type(client.transport.create_document), '__call__') as rpc:
        rpc.return_value = document_service.CreateDocumentResponse()
        client.create_document(request)

        # One stub call, carrying the request unchanged.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must carry the parent value.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in call_kwargs['metadata']
+
+
@pytest.mark.asyncio
async def test_create_document_field_headers_async():
    """URI-bound fields are mirrored into x-goog-request-params (async client)."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI must also be sent as a header.
    request = document_service_request.CreateDocumentRequest()
    request.parent = 'parent_value'

    with mock.patch.object(type(client.transport.create_document), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            document_service.CreateDocumentResponse())
        await client.create_document(request)

        # The stub was invoked with the request unchanged.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must carry the parent value.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in call_kwargs['metadata']
+
+
def test_create_document_flattened():
    """Flattened keyword arguments are copied onto the request message."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.create_document), '__call__') as rpc:
        rpc.return_value = document_service.CreateDocumentResponse()
        # Truthy values for each flattened field, passed as keywords.
        client.create_document(
            parent='parent_value',
            document=gcc_document.Document(name='name_value'),
        )

        # The assembled request carries both flattened values.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == 'parent_value'
        assert call_args[0].document == gcc_document.Document(name='name_value')
+
+
def test_create_document_flattened_error():
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.create_document(
            document_service_request.CreateDocumentRequest(),
            parent='parent_value',
            document=gcc_document.Document(name='name_value'),
        )
+
@pytest.mark.asyncio
async def test_create_document_flattened_async():
    """Flattened keyword arguments are copied onto the request message (async client).

    Note: the generated version assigned ``call.return_value`` twice; the first
    (plain-message) assignment was dead code, immediately overwritten by the
    awaitable ``FakeUnaryUnaryCall``, and has been removed.
    """
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document),
            '__call__') as call:
        # Designate an appropriate awaitable return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.CreateDocumentResponse())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_document(
            parent='parent_value',
            document=gcc_document.Document(name='name_value'),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
        arg = args[0].document
        mock_val = gcc_document.Document(name='name_value')
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_create_document_flattened_error_async():
    """Mixing a request object with flattened kwargs raises ValueError (async client)."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    with pytest.raises(ValueError):
        await client.create_document(
            document_service_request.CreateDocumentRequest(),
            parent='parent_value',
            document=gcc_document.Document(name='name_value'),
        )
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.GetDocumentRequest,
  dict,
])
def test_get_document(request_type, transport: str = 'grpc'):
    """Unary GetDocument returns a fully-populated Document over the mocked stub."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_document),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): plain_text and raw_document_path are both set here but
        # neither is asserted below — they look like members of the same oneof
        # (only the last assignment would survive); confirm against the proto.
        call.return_value = gcc_document.Document(
            name='name_value',
            reference_id='reference_id_value',
            display_name='display_name_value',
            title='title_value',
            display_uri='display_uri_value',
            document_schema_name='document_schema_name_value',
            structured_content_uri='structured_content_uri_value',
            raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF,
            async_enabled=True,
            content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE,
            text_extraction_disabled=True,
            text_extraction_enabled=True,
            creator='creator_value',
            updater='updater_value',
            legal_hold=True,
            plain_text='plain_text_value',
            raw_document_path='raw_document_path_value',
        )
        response = client.get_document(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = document_service_request.GetDocumentRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect, and that every
    # scalar field round-tripped through the client unchanged.
    assert isinstance(response, gcc_document.Document)
    assert response.name == 'name_value'
    assert response.reference_id == 'reference_id_value'
    assert response.display_name == 'display_name_value'
    assert response.title == 'title_value'
    assert response.display_uri == 'display_uri_value'
    assert response.document_schema_name == 'document_schema_name_value'
    assert response.structured_content_uri == 'structured_content_uri_value'
    assert response.raw_document_file_type == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF
    assert response.async_enabled is True
    assert response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE
    assert response.text_extraction_disabled is True
    assert response.text_extraction_enabled is True
    assert response.creator == 'creator_value'
    assert response.updater == 'updater_value'
    assert response.legal_hold is True
+
+
def test_get_document_non_empty_request_with_auto_populated_field():
    """Coverage failsafe for AIP-4235: non-UUID4 string fields survive a non-empty request."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Set only the non-UUID4 string fields by hand; UUID4 fields, if any,
    # must be auto-populated per AIP 4235.
    request = document_service_request.GetDocumentRequest(name='name_value')

    with mock.patch.object(type(client.transport.get_document), '__call__') as rpc:
        rpc.return_value.name = "foo"  # compute clients read operation as a string
        client.get_document(request=request)
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == document_service_request.GetDocumentRequest(
            name='name_value',
        )
+
def test_get_document_use_cached_wrapped_rpc():
    """Wrapped RPCs are built once by _prep_wrapped_messages and reused on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Construction wraps all methods up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be present in the transport's cache.
        assert client._transport.get_document in client._transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations become observable.
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"  # compute clients read operation as a string
        client._transport._wrapped_methods[client._transport.get_document] = mock_rpc

        request = {}
        client.get_document(request)
        assert mock_rpc.call_count == 1

        client.get_document(request)
        # No re-wrapping occurred; the cached mock absorbed both calls.
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_get_document_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async clients also build wrapped RPCs once and reuse them per call."""
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = DocumentServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Construction wraps all methods up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The method must already be present in the inner transport's cache.
        assert client._client._transport.get_document in client._client._transport._wrapped_methods

        # Swap the cached wrapper for an AsyncMock so invocations become observable.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.get_document] = mock_rpc

        request = {}
        await client.get_document(request)
        assert mock_rpc.call_count == 1

        await client.get_document(request)
        # No re-wrapping occurred; the cached mock absorbed both calls.
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_get_document_async(transport: str = 'grpc_asyncio', request_type=document_service_request.GetDocumentRequest):
    """Async GetDocument resolves to a fully-populated Document."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_document),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # The fake unary-unary call makes the mocked result awaitable.
        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gcc_document.Document(
            name='name_value',
            reference_id='reference_id_value',
            display_name='display_name_value',
            title='title_value',
            display_uri='display_uri_value',
            document_schema_name='document_schema_name_value',
            structured_content_uri='structured_content_uri_value',
            raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF,
            async_enabled=True,
            content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE,
            text_extraction_disabled=True,
            text_extraction_enabled=True,
            creator='creator_value',
            updater='updater_value',
            legal_hold=True,
        ))
        response = await client.get_document(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = document_service_request.GetDocumentRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect, and that every
    # scalar field round-tripped through the async client unchanged.
    assert isinstance(response, gcc_document.Document)
    assert response.name == 'name_value'
    assert response.reference_id == 'reference_id_value'
    assert response.display_name == 'display_name_value'
    assert response.title == 'title_value'
    assert response.display_uri == 'display_uri_value'
    assert response.document_schema_name == 'document_schema_name_value'
    assert response.structured_content_uri == 'structured_content_uri_value'
    assert response.raw_document_file_type == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF
    assert response.async_enabled is True
    assert response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE
    assert response.text_extraction_disabled is True
    assert response.text_extraction_enabled is True
    assert response.creator == 'creator_value'
    assert response.updater == 'updater_value'
    assert response.legal_hold is True
+
+
@pytest.mark.asyncio
async def test_get_document_async_from_dict():
    """Dict requests follow the same async path as proto requests."""
    await test_get_document_async(request_type=dict)
+
def test_get_document_field_headers():
    """URI-bound request fields are mirrored into the x-goog-request-params header."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI must also be sent as a header.
    request = document_service_request.GetDocumentRequest()
    request.name = 'name_value'

    with mock.patch.object(type(client.transport.get_document), '__call__') as rpc:
        rpc.return_value = gcc_document.Document()
        client.get_document(request)

        # One stub call, carrying the request unchanged.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must carry the name value.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in call_kwargs['metadata']
+
+
@pytest.mark.asyncio
async def test_get_document_field_headers_async():
    """URI-bound fields are mirrored into x-goog-request-params (async client)."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI must also be sent as a header.
    request = document_service_request.GetDocumentRequest()
    request.name = 'name_value'

    with mock.patch.object(type(client.transport.get_document), '__call__') as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document.Document())
        await client.get_document(request)

        # The stub was invoked with the request unchanged.
        assert rpc.mock_calls
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header must carry the name value.
    _, _, call_kwargs = rpc.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in call_kwargs['metadata']
+
+
def test_get_document_flattened():
    """Flattened keyword arguments are copied onto the request message."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.get_document), '__call__') as rpc:
        rpc.return_value = gcc_document.Document()
        # A truthy value for the flattened field, passed as a keyword.
        client.get_document(
            name='name_value',
        )

        # The assembled request carries the flattened value.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].name == 'name_value'
+
+
def test_get_document_flattened_error():
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with pytest.raises(ValueError):
        client.get_document(
            document_service_request.GetDocumentRequest(),
            name='name_value',
        )
+
@pytest.mark.asyncio
async def test_get_document_flattened_async():
    """Flattened keyword arguments are copied onto the request message (async client).

    Note: the generated version assigned ``call.return_value`` twice; the first
    (plain-message) assignment was dead code, immediately overwritten by the
    awaitable ``FakeUnaryUnaryCall``, and has been removed.
    """
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_document),
            '__call__') as call:
        # Designate an appropriate awaitable return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document.Document())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_document(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_get_document_flattened_error_async():
    """Mixing a request object with flattened kwargs raises ValueError (async client)."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    with pytest.raises(ValueError):
        await client.get_document(
            document_service_request.GetDocumentRequest(),
            name='name_value',
        )
+
+
@pytest.mark.parametrize("request_type", [
    document_service_request.UpdateDocumentRequest,
    dict,
])
def test_update_document(request_type, transport: str = 'grpc'):
    """Unary UpdateDocument returns an UpdateDocumentResponse over the mocked stub."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # proto3 treats every field as optional and the API is mocked out,
    # so an empty request is sufficient.
    request = request_type()

    with mock.patch.object(type(client.transport.update_document), '__call__') as rpc:
        rpc.return_value = document_service.UpdateDocumentResponse()
        response = client.update_document(request)

        # Exactly one stub invocation, carrying an (empty) UpdateDocumentRequest.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == document_service_request.UpdateDocumentRequest()

    # The response surfaces with the expected type.
    assert isinstance(response, document_service.UpdateDocumentResponse)
+
+
def test_update_document_non_empty_request_with_auto_populated_field():
    """Coverage failsafe for AIP-4235: non-UUID4 string fields survive a non-empty request."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Set only the non-UUID4 string fields by hand; UUID4 fields, if any,
    # must be auto-populated per AIP 4235.
    request = document_service_request.UpdateDocumentRequest(name='name_value')

    with mock.patch.object(type(client.transport.update_document), '__call__') as rpc:
        rpc.return_value.name = "foo"  # compute clients read operation as a string
        client.update_document(request=request)
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == document_service_request.UpdateDocumentRequest(
            name='name_value',
        )
+
+def test_update_document_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_document in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_document] = mock_rpc
+        request = {}
+        client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.update_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_document_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_document in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_document] = mock_rpc
+
+        request = {}
+        await client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.update_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_document_async(transport: str = 'grpc_asyncio', request_type=document_service_request.UpdateDocumentRequest):
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document_service.UpdateDocumentResponse(
+        ))
+        response = await client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_service_request.UpdateDocumentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, document_service.UpdateDocumentResponse)
+
+
+@pytest.mark.asyncio
+async def test_update_document_async_from_dict():
+    await test_update_document_async(request_type=dict)
+
+def test_update_document_field_headers():
+    """URI-bound request fields must be sent as x-goog-request-params metadata."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.UpdateDocumentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document),
+            '__call__') as call:
+        call.return_value = document_service.UpdateDocumentResponse()
+        client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_document_field_headers_async():
+    """URI-bound request fields must be sent as x-goog-request-params metadata (async)."""
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.UpdateDocumentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.UpdateDocumentResponse())
+        await client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_document_flattened():
+    """Flattened kwargs must be packed into the outgoing request message."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.UpdateDocumentResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_document(
+            name='name_value',
+            document=gcc_document.Document(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+        arg = args[0].document
+        mock_val = gcc_document.Document(name='name_value')
+        assert arg == mock_val
+
+
+def test_update_document_flattened_error():
+    """Request object plus flattened kwargs must be rejected with ValueError."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_document(
+            document_service_request.UpdateDocumentRequest(),
+            name='name_value',
+            document=gcc_document.Document(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_update_document_flattened_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.UpdateDocumentResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.UpdateDocumentResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_document(
+            name='name_value',
+            document=gcc_document.Document(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+        arg = args[0].document
+        mock_val = gcc_document.Document(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_document_flattened_error_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_document(
+            document_service_request.UpdateDocumentRequest(),
+            name='name_value',
+            document=gcc_document.Document(name='name_value'),
+        )
+
+
+# ---------------------------------------------------------------------------
+# DeleteDocument RPC
+# ---------------------------------------------------------------------------
+@pytest.mark.parametrize("request_type", [
+  document_service_request.DeleteDocumentRequest,
+  dict,
+])
+def test_delete_document(request_type, transport: str = 'grpc'):
+    """Empty-request round trip: the stub must receive a default DeleteDocumentRequest."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = document_service_request.DeleteDocumentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_document_non_empty_request_with_auto_populated_field():
+    """Caller-supplied non-UUID string fields must be forwarded to the stub unchanged."""
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = document_service_request.DeleteDocumentRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.delete_document(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == document_service_request.DeleteDocumentRequest(
+            name='name_value',
+        )
+
+def test_delete_document_use_cached_wrapped_rpc():
+    """The sync client must reuse the wrapped RPC cached at construction time."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_document in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc
+        request = {}
+        client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_document_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    """The async client must reuse the wrapped RPC cached at construction time."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.delete_document in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.delete_document] = mock_rpc
+
+        request = {}
+        await client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.delete_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_document_async(transport: str = 'grpc_asyncio', request_type=document_service_request.DeleteDocumentRequest):
+    """Empty-request round trip on the async client; DeleteDocument returns None."""
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_service_request.DeleteDocumentRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_document_async_from_dict():
+    """A plain dict must be accepted in place of the request message."""
+    await test_delete_document_async(request_type=dict)
+
+def test_delete_document_field_headers():
+    """URI-bound request fields must be sent as x-goog-request-params metadata."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.DeleteDocumentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        call.return_value = None
+        client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_document_field_headers_async():
+    """URI-bound request fields must be sent as x-goog-request-params metadata (async)."""
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.DeleteDocumentRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_document_flattened():
+    """Flattened 'name' kwarg must be packed into the outgoing request message."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_document(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_document_flattened_error():
+    """Request object plus flattened kwargs must be rejected with ValueError."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_document(
+            document_service_request.DeleteDocumentRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_document_flattened_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_document(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_document_flattened_error_async():
+    """Request object plus flattened kwargs must be rejected with ValueError (async)."""
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_document(
+            document_service_request.DeleteDocumentRequest(),
+            name='name_value',
+        )
+
+
+# ---------------------------------------------------------------------------
+# SearchDocuments RPC
+# ---------------------------------------------------------------------------
+@pytest.mark.parametrize("request_type", [
+  document_service_request.SearchDocumentsRequest,
+  dict,
+])
+def test_search_documents(request_type, transport: str = 'grpc'):
+    """Empty-request round trip; the response is wrapped in a SearchDocumentsPager."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.search_documents),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.SearchDocumentsResponse(
+            next_page_token='next_page_token_value',
+            total_size=1086,
+            question_answer='question_answer_value',
+        )
+        response = client.search_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = document_service_request.SearchDocumentsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.SearchDocumentsPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.total_size == 1086
+    assert response.question_answer == 'question_answer_value'
+
+
+def test_search_documents_non_empty_request_with_auto_populated_field():
+    """Caller-supplied non-UUID string fields must be forwarded to the stub unchanged."""
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = document_service_request.SearchDocumentsRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+        order_by='order_by_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.search_documents),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.search_documents(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == document_service_request.SearchDocumentsRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+            order_by='order_by_value',
+        )
+
+def test_search_documents_use_cached_wrapped_rpc():
+    """The sync client must reuse the wrapped RPC cached at construction time."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.search_documents in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.search_documents] = mock_rpc
+        request = {}
+        client.search_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.search_documents(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_search_documents_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    """The async client must reuse the wrapped RPC cached at construction time."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.search_documents in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.search_documents] = mock_rpc
+
+        request = {}
+        await client.search_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.search_documents(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_search_documents_async(transport: str = 'grpc_asyncio', request_type=document_service_request.SearchDocumentsRequest):
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.search_documents),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document_service.SearchDocumentsResponse(
+            next_page_token='next_page_token_value',
+            total_size=1086,
+            question_answer='question_answer_value',
+        ))
+        response = await client.search_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_service_request.SearchDocumentsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.SearchDocumentsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.total_size == 1086
+    assert response.question_answer == 'question_answer_value'
+
+
+@pytest.mark.asyncio
+async def test_search_documents_async_from_dict():
+    await test_search_documents_async(request_type=dict)
+
+def test_search_documents_field_headers():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.SearchDocumentsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.search_documents),
+            '__call__') as call:
+        call.return_value = document_service.SearchDocumentsResponse()
+        client.search_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_search_documents_field_headers_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.SearchDocumentsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.search_documents),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.SearchDocumentsResponse())
+        await client.search_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
def test_search_documents_flattened():
    """Verify the flattened (keyword-argument) form of search_documents
    copies ``parent`` into the request object sent over the wire."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.search_documents),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = document_service.SearchDocumentsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.search_documents(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
+
+
def test_search_documents_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.search_documents(
            document_service_request.SearchDocumentsRequest(),
            parent='parent_value',
        )
+
@pytest.mark.asyncio
async def test_search_documents_flattened_async():
    """Verify the async flattened search_documents call copies ``parent``
    into the request object sent over the wire.

    Fix: the original assigned ``call.return_value`` twice — a plain
    ``SearchDocumentsResponse`` immediately overwritten by the
    ``FakeUnaryUnaryCall`` wrapper; the dead first assignment is removed.
    """
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.search_documents),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.SearchDocumentsResponse())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.search_documents(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_search_documents_flattened_error_async():
    """Async variant: request object plus flattened fields must raise ValueError."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.search_documents(
            document_service_request.SearchDocumentsRequest(),
            parent='parent_value',
        )
+
+
def test_search_documents_pager(transport_name: str = "grpc"):
    """Verify search_documents returns a pager that iterates every
    ``MatchingDocument`` across all pages and carries the routing metadata,
    retry, and timeout passed to the call.

    Fix: the original initialized ``expected_metadata = ()`` and then
    rebuilt it via ``tuple(expected_metadata) + (...)``; the dead
    initializer is collapsed into a single tuple literal (same value).
    """
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.search_documents),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
                next_page_token='abc',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[],
                next_page_token='def',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
                next_page_token='ghi',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
            ),
            RuntimeError,
        )

        retry = retries.Retry()
        timeout = 5
        # The pager must forward the implicit routing header for ``parent``.
        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.search_documents(request={}, retry=retry, timeout=timeout)

        assert pager._metadata == expected_metadata
        assert pager._retry == retry
        assert pager._timeout == timeout

        # 3 + 0 + 1 + 2 documents across the four pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, document_service.SearchDocumentsResponse.MatchingDocument)
                   for i in results)
def test_search_documents_pages(transport_name: str = "grpc"):
    """Verify the pager's ``pages`` iterator exposes each raw page with its
    next_page_token ('abc', 'def', 'ghi', then empty on the last page)."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.search_documents),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
                next_page_token='abc',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[],
                next_page_token='def',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
                next_page_token='ghi',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.search_documents(request={}).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
@pytest.mark.asyncio
async def test_search_documents_async_pager():
    """Verify the async pager iterates all 6 MatchingDocuments across the
    mocked pages and exposes the first page's next_page_token."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.search_documents),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
                next_page_token='abc',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[],
                next_page_token='def',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
                next_page_token='ghi',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.search_documents(request={},)
        assert async_pager.next_page_token == 'abc'
        responses = []
        async for response in async_pager: # pragma: no branch
            responses.append(response)

        # 3 + 0 + 1 + 2 documents across the four pages.
        assert len(responses) == 6
        assert all(isinstance(i, document_service.SearchDocumentsResponse.MatchingDocument)
                for i in responses)
+
+
@pytest.mark.asyncio
async def test_search_documents_async_pages():
    """Verify the async pager's ``pages`` iterator yields raw pages whose
    next_page_tokens are 'abc', 'def', 'ghi', then empty."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.search_documents),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
                next_page_token='abc',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[],
                next_page_token='def',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
                next_page_token='ghi',
            ),
            document_service.SearchDocumentsResponse(
                matching_documents=[
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                    document_service.SearchDocumentsResponse.MatchingDocument(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
        async for page_ in ( # pragma: no branch
            await client.search_documents(request={})
        ).pages:
            pages.append(page_)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
@pytest.mark.parametrize("request_type", [
  document_service_request.LockDocumentRequest,
  dict,
])
def test_lock_document(request_type, transport: str = 'grpc'):
    """Verify lock_document sends a LockDocumentRequest over gRPC and
    returns the mocked Document with its scalar fields intact."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.lock_document),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = gcc_document.Document(
            name='name_value',
            reference_id='reference_id_value',
            display_name='display_name_value',
            title='title_value',
            display_uri='display_uri_value',
            document_schema_name='document_schema_name_value',
            structured_content_uri='structured_content_uri_value',
            raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF,
            async_enabled=True,
            content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE,
            text_extraction_disabled=True,
            text_extraction_enabled=True,
            creator='creator_value',
            updater='updater_value',
            legal_hold=True,
            plain_text='plain_text_value',
            raw_document_path='raw_document_path_value',
        )
        response = client.lock_document(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        # The dict/empty inputs must both serialize to an empty request proto.
        request = document_service_request.LockDocumentRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, gcc_document.Document)
    assert response.name == 'name_value'
    assert response.reference_id == 'reference_id_value'
    assert response.display_name == 'display_name_value'
    assert response.title == 'title_value'
    assert response.display_uri == 'display_uri_value'
    assert response.document_schema_name == 'document_schema_name_value'
    assert response.structured_content_uri == 'structured_content_uri_value'
    assert response.raw_document_file_type == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF
    assert response.async_enabled is True
    assert response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE
    assert response.text_extraction_disabled is True
    assert response.text_extraction_enabled is True
    assert response.creator == 'creator_value'
    assert response.updater == 'updater_value'
    assert response.legal_hold is True
+
+
def test_lock_document_non_empty_request_with_auto_populated_field():
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = document_service_request.LockDocumentRequest(
        name='name_value',
        collection_id='collection_id_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.lock_document),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.lock_document(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The request must round-trip with the explicitly-set fields intact.
        assert args[0] == document_service_request.LockDocumentRequest(
            name='name_value',
            collection_id='collection_id_value',
        )
+
def test_lock_document_use_cached_wrapped_rpc():
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.lock_document in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.lock_document] = mock_rpc
        request = {}
        client.lock_document(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.lock_document(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_lock_document_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = DocumentServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.lock_document in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.lock_document] = mock_rpc

        request = {}
        await client.lock_document(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.lock_document(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_lock_document_async(transport: str = 'grpc_asyncio', request_type=document_service_request.LockDocumentRequest):
    """Async variant: lock_document awaits the stub call and returns the
    mocked Document with its scalar fields intact."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.lock_document),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gcc_document.Document(
            name='name_value',
            reference_id='reference_id_value',
            display_name='display_name_value',
            title='title_value',
            display_uri='display_uri_value',
            document_schema_name='document_schema_name_value',
            structured_content_uri='structured_content_uri_value',
            raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF,
            async_enabled=True,
            content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE,
            text_extraction_disabled=True,
            text_extraction_enabled=True,
            creator='creator_value',
            updater='updater_value',
            legal_hold=True,
        ))
        response = await client.lock_document(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = document_service_request.LockDocumentRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, gcc_document.Document)
    assert response.name == 'name_value'
    assert response.reference_id == 'reference_id_value'
    assert response.display_name == 'display_name_value'
    assert response.title == 'title_value'
    assert response.display_uri == 'display_uri_value'
    assert response.document_schema_name == 'document_schema_name_value'
    assert response.structured_content_uri == 'structured_content_uri_value'
    assert response.raw_document_file_type == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF
    assert response.async_enabled is True
    assert response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE
    assert response.text_extraction_disabled is True
    assert response.text_extraction_enabled is True
    assert response.creator == 'creator_value'
    assert response.updater == 'updater_value'
    assert response.legal_hold is True
+
+
@pytest.mark.asyncio
async def test_lock_document_async_from_dict():
    """Re-run the async lock_document test with a dict request body."""
    await test_lock_document_async(request_type=dict)
+
def test_lock_document_field_headers():
    """Verify the routing field header for ``name`` is sent on lock_document."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_service_request.LockDocumentRequest()

    request.name = 'name_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.lock_document),
            '__call__') as call:
        call.return_value = gcc_document.Document()
        client.lock_document(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in kw['metadata']
+
+
@pytest.mark.asyncio
async def test_lock_document_field_headers_async():
    """Async variant: routing field header for ``name`` must be sent."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = document_service_request.LockDocumentRequest()

    request.name = 'name_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.lock_document),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document.Document())
        await client.lock_document(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in kw['metadata']
+
+
def test_lock_document_flattened():
    """Verify the flattened form of lock_document copies ``name`` into the
    request object sent over the wire."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.lock_document),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = gcc_document.Document()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.lock_document(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
+
+
def test_lock_document_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.lock_document(
            document_service_request.LockDocumentRequest(),
            name='name_value',
        )
+
@pytest.mark.asyncio
async def test_lock_document_flattened_async():
    """Verify the async flattened lock_document call copies ``name`` into
    the request object sent over the wire.

    Fix: the original assigned ``call.return_value`` twice — a plain
    ``Document`` immediately overwritten by the ``FakeUnaryUnaryCall``
    wrapper; the dead first assignment is removed.
    """
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.lock_document),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document.Document())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.lock_document(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_lock_document_flattened_error_async():
    """Async variant: request object plus flattened fields must raise ValueError."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.lock_document(
            document_service_request.LockDocumentRequest(),
            name='name_value',
        )
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.FetchAclRequest,
  dict,
])
def test_fetch_acl(request_type, transport: str = 'grpc'):
    """Verify fetch_acl sends a FetchAclRequest over gRPC and returns a
    FetchAclResponse."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.fetch_acl),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = document_service.FetchAclResponse(
        )
        response = client.fetch_acl(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        # The dict/empty inputs must both serialize to an empty request proto.
        request = document_service_request.FetchAclRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, document_service.FetchAclResponse)
+
+
def test_fetch_acl_non_empty_request_with_auto_populated_field():
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = document_service_request.FetchAclRequest(
        resource='resource_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.fetch_acl),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.fetch_acl(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        # The request must round-trip with the explicitly-set field intact.
        assert args[0] == document_service_request.FetchAclRequest(
            resource='resource_value',
        )
+
def test_fetch_acl_use_cached_wrapped_rpc():
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = DocumentServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.fetch_acl in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.fetch_acl] = mock_rpc
        request = {}
        client.fetch_acl(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.fetch_acl(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_fetch_acl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = DocumentServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.fetch_acl in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.fetch_acl] = mock_rpc

        request = {}
        await client.fetch_acl(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.fetch_acl(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_fetch_acl_async(transport: str = 'grpc_asyncio', request_type=document_service_request.FetchAclRequest):
    """Async variant: fetch_acl awaits the stub call and returns a
    FetchAclResponse."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.fetch_acl),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document_service.FetchAclResponse(
        ))
        response = await client.fetch_acl(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = document_service_request.FetchAclRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, document_service.FetchAclResponse)
+
+
+@pytest.mark.asyncio
+async def test_fetch_acl_async_from_dict():
+    await test_fetch_acl_async(request_type=dict)
+
+def test_fetch_acl_field_headers():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.FetchAclRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.fetch_acl),
+            '__call__') as call:
+        call.return_value = document_service.FetchAclResponse()
+        client.fetch_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_fetch_acl_field_headers_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.FetchAclRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.fetch_acl),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.FetchAclResponse())
+        await client.fetch_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+def test_fetch_acl_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.fetch_acl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.FetchAclResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.fetch_acl(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_fetch_acl_flattened_error():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.fetch_acl(
+            document_service_request.FetchAclRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_fetch_acl_flattened_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.fetch_acl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.FetchAclResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.FetchAclResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.fetch_acl(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_fetch_acl_flattened_error_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.fetch_acl(
+            document_service_request.FetchAclRequest(),
+            resource='resource_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  document_service_request.SetAclRequest,
+  dict,
+])
+def test_set_acl(request_type, transport: str = 'grpc'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.SetAclResponse(
+        )
+        response = client.set_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = document_service_request.SetAclRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, document_service.SetAclResponse)
+
+
+def test_set_acl_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = document_service_request.SetAclRequest(
+        resource='resource_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.set_acl(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == document_service_request.SetAclRequest(
+            resource='resource_value',
+        )
+
+def test_set_acl_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.set_acl in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.set_acl] = mock_rpc
+        request = {}
+        client.set_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.set_acl(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_acl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DocumentServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.set_acl in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.set_acl] = mock_rpc
+
+        request = {}
+        await client.set_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.set_acl(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_acl_async(transport: str = 'grpc_asyncio', request_type=document_service_request.SetAclRequest):
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(document_service.SetAclResponse(
+        ))
+        response = await client.set_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = document_service_request.SetAclRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, document_service.SetAclResponse)
+
+
+@pytest.mark.asyncio
+async def test_set_acl_async_from_dict():
+    await test_set_acl_async(request_type=dict)
+
+def test_set_acl_field_headers():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.SetAclRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        call.return_value = document_service.SetAclResponse()
+        client.set_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_set_acl_field_headers_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = document_service_request.SetAclRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.SetAclResponse())
+        await client.set_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+def test_set_acl_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.SetAclResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_acl(
+            resource='resource_value',
+            policy=policy_pb2.Policy(version=774),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+        arg = args[0].policy
+        mock_val = policy_pb2.Policy(version=774)
+        assert arg == mock_val
+
+
+def test_set_acl_flattened_error():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_acl(
+            document_service_request.SetAclRequest(),
+            resource='resource_value',
+            policy=policy_pb2.Policy(version=774),
+        )
+
+@pytest.mark.asyncio
+async def test_set_acl_flattened_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = document_service.SetAclResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.SetAclResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_acl(
+            resource='resource_value',
+            policy=policy_pb2.Policy(version=774),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+        arg = args[0].policy
+        mock_val = policy_pb2.Policy(version=774)
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_set_acl_flattened_error_async():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.set_acl(
+            document_service_request.SetAclRequest(),
+            resource='resource_value',
+            policy=policy_pb2.Policy(version=774),
+        )
+
+
+def test_create_document_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_document in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_document] = mock_rpc
+
+        request = {}
+        client.create_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.create_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_create_document_rest_required_fields(request_type=document_service_request.CreateDocumentRequest):
+    transport_class = transports.DocumentServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = document_service.CreateDocumentResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = document_service.CreateDocumentResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_document(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_document_rest_unset_required_fields():
+    transport = transports.DocumentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.create_document._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "document", )))
+
+
+def test_create_document_rest_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = document_service.CreateDocumentResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+            document=gcc_document.Document(name='name_value'),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = document_service.CreateDocumentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.create_document(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/documents" % client.transport._host, args[1])
+
+
+def test_create_document_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_document(
+            document_service_request.CreateDocumentRequest(),
+            parent='parent_value',
+            document=gcc_document.Document(name='name_value'),
+        )
+
+
+def test_get_document_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_document in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_document] = mock_rpc
+
+        request = {}
+        client.get_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_document_rest_required_fields(request_type=document_service_request.GetDocumentRequest):
+    transport_class = transports.DocumentServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = gcc_document.Document()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = gcc_document.Document.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_document(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_document_rest_unset_required_fields():
+    transport = transports.DocumentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.get_document._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_document_rest_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcc_document.Document()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/documents/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = gcc_document.Document.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.get_document(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/documents/*}:get" % client.transport._host, args[1])
+
+
+def test_get_document_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_document(
+            document_service_request.GetDocumentRequest(),
+            name='name_value',
+        )
+
+
+def test_update_document_rest_use_cached_wrapped_rpc():
+    """Repeated update_document calls reuse one cached wrapped rpc."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_document in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_document] = mock_rpc
+
+        request = {}
+        client.update_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.update_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_update_document_rest_required_fields(request_type=document_service_request.UpdateDocumentRequest):
+    """UpdateDocument REST: required-field handling through a mocked call.
+
+    Serializes a request whose required fields hold default values, checks
+    that ``_get_unset_required_fields`` re-populates them, then drives the
+    client through a mocked transcode/HTTP round trip and asserts the
+    expected query parameters.
+    """
+    transport_class = transports.DocumentServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = document_service.UpdateDocumentResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = document_service.UpdateDocumentResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_document(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_document_rest_unset_required_fields():
+    transport = transports.DocumentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.update_document._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", "document", )))
+
+
+def test_update_document_rest_flattened():
+    """Flattened update_document call hits the expected REST URL template."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = document_service.UpdateDocumentResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/documents/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+            document=gcc_document.Document(name='name_value'),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = document_service.UpdateDocumentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.update_document(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/documents/*}" % client.transport._host, args[1])
+
+
+def test_update_document_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_document(
+            document_service_request.UpdateDocumentRequest(),
+            name='name_value',
+            document=gcc_document.Document(name='name_value'),
+        )
+
+
+def test_delete_document_rest_use_cached_wrapped_rpc():
+    """Repeated delete_document calls reuse one cached wrapped rpc."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_document in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc
+
+        request = {}
+        client.delete_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_delete_document_rest_required_fields(request_type=document_service_request.DeleteDocumentRequest):
+    """DeleteDocument REST: required-field handling through a mocked call.
+
+    Same pattern as the other ``*_required_fields`` tests; DeleteDocument
+    returns Empty, so the mocked HTTP body is an empty string.
+    """
+    transport_class = transports.DocumentServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ''
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_document(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_document_rest_unset_required_fields():
+    transport = transports.DocumentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.delete_document._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_delete_document_rest_flattened():
+    """Flattened delete_document call hits the expected REST URL template."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/documents/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ''
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.delete_document(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/documents/*}:delete" % client.transport._host, args[1])
+
+
+def test_delete_document_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_document(
+            document_service_request.DeleteDocumentRequest(),
+            name='name_value',
+        )
+
+
+def test_search_documents_rest_use_cached_wrapped_rpc():
+    """Repeated search_documents calls reuse one cached wrapped rpc."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.search_documents in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.search_documents] = mock_rpc
+
+        request = {}
+        client.search_documents(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.search_documents(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_search_documents_rest_required_fields(request_type=document_service_request.SearchDocumentsRequest):
+    """SearchDocuments REST: required-field handling through a mocked call.
+
+    Serializes a request whose required ``parent`` field holds the default
+    value, checks that ``_get_unset_required_fields`` re-populates it, then
+    drives the client through a mocked transcode/HTTP round trip and asserts
+    the expected query parameters.
+    """
+    transport_class = transports.DocumentServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_documents._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_documents._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = document_service.SearchDocumentsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = document_service.SearchDocumentsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.search_documents(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_search_documents_rest_unset_required_fields():
+    transport = transports.DocumentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.search_documents._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", )))
+
+
+def test_search_documents_rest_flattened():
+    """Flattened search_documents call hits the expected REST URL template."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = document_service.SearchDocumentsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = document_service.SearchDocumentsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.search_documents(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/documents:search" % client.transport._host, args[1])
+
+
+def test_search_documents_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.search_documents(
+            document_service_request.SearchDocumentsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_search_documents_rest_pager(transport: str = 'rest'):
+    """SearchDocuments pager iterates all pages and flattens their items.
+
+    Feeds four mocked pages (3 + 0 + 1 + 2 matching documents) through the
+    HTTP session twice — once for item iteration, once for page iteration —
+    and checks both the flattened result count and the per-page tokens.
+    """
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+        #with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            document_service.SearchDocumentsResponse(
+                matching_documents=[
+                    document_service.SearchDocumentsResponse.MatchingDocument(),
+                    document_service.SearchDocumentsResponse.MatchingDocument(),
+                    document_service.SearchDocumentsResponse.MatchingDocument(),
+                ],
+                next_page_token='abc',
+            ),
+            document_service.SearchDocumentsResponse(
+                matching_documents=[],
+                next_page_token='def',
+            ),
+            document_service.SearchDocumentsResponse(
+                matching_documents=[
+                    document_service.SearchDocumentsResponse.MatchingDocument(),
+                ],
+                next_page_token='ghi',
+            ),
+            document_service.SearchDocumentsResponse(
+                matching_documents=[
+                    document_service.SearchDocumentsResponse.MatchingDocument(),
+                    document_service.SearchDocumentsResponse.MatchingDocument(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(document_service.SearchDocumentsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/locations/sample2'}
+
+        pager = client.search_documents(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, document_service.SearchDocumentsResponse.MatchingDocument)
+                for i in results)
+
+        pages = list(client.search_documents(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_lock_document_rest_use_cached_wrapped_rpc():
+    """Repeated lock_document calls reuse one cached wrapped rpc."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.lock_document in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.lock_document] = mock_rpc
+
+        request = {}
+        client.lock_document(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.lock_document(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_lock_document_rest_required_fields(request_type=document_service_request.LockDocumentRequest):
+    """LockDocument REST: required-field handling through a mocked call.
+
+    Serializes a request whose required ``name`` field holds the default
+    value, checks that ``_get_unset_required_fields`` re-populates it, then
+    drives the client through a mocked transcode/HTTP round trip and asserts
+    the expected query parameters.
+    """
+    transport_class = transports.DocumentServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).lock_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).lock_document._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = gcc_document.Document()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = gcc_document.Document.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.lock_document(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_lock_document_rest_unset_required_fields():
+    transport = transports.DocumentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.lock_document._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_lock_document_rest_flattened():
+    """Flattened lock_document call hits the expected REST URL template."""
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcc_document.Document()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/locations/sample2/documents/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = gcc_document.Document.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.lock_document(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/locations/*/documents/*}:lock" % client.transport._host, args[1])
+
+
+def test_lock_document_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.lock_document(
+            document_service_request.LockDocumentRequest(),
+            name='name_value',
+        )
+
+
+def test_fetch_acl_rest_use_cached_wrapped_rpc():
+    """Repeated fetch_acl calls reuse one cached wrapped rpc."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.fetch_acl in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.fetch_acl] = mock_rpc
+
+        request = {}
+        client.fetch_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.fetch_acl(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_fetch_acl_rest_required_fields(request_type=document_service_request.FetchAclRequest):
+    """FetchAcl REST: required-field handling through a mocked call.
+
+    Serializes a request whose required ``resource`` field holds the default
+    value, checks that ``_get_unset_required_fields`` re-populates it, then
+    drives the client through a mocked transcode/HTTP round trip and asserts
+    the expected query parameters.
+    """
+    transport_class = transports.DocumentServiceRestTransport
+
+    request_init = {}
+    request_init["resource"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_acl._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["resource"] = 'resource_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_acl._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "resource" in jsonified_request
+    assert jsonified_request["resource"] == 'resource_value'
+
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = document_service.FetchAclResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = document_service.FetchAclResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.fetch_acl(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_fetch_acl_rest_unset_required_fields():
+    transport = transports.DocumentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.fetch_acl._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("resource", )))
+
+
+def test_fetch_acl_rest_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = document_service.FetchAclResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'resource': 'projects/sample1/locations/sample2/documents/sample3'}
+
+        # supply a truthy value for each flattened field
+        mock_args = dict(
+            resource='resource_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = document_service.FetchAclResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.fetch_acl(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{resource=projects/*/locations/*/documents/*}:fetchAcl" % client.transport._host, args[1])
+
+
+def test_fetch_acl_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Passing both a request object and flattened
+    # fields must raise ValueError.
+    with pytest.raises(ValueError):
+        client.fetch_acl(
+            document_service_request.FetchAclRequest(),
+            resource='resource_value',
+        )
+
+
+def test_set_acl_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Client construction should wrap the RPCs up front
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.set_acl in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.set_acl] = mock_rpc
+
+        request = {}
+        client.set_acl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.set_acl(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_set_acl_rest_required_fields(request_type=document_service_request.SetAclRequest):
+    transport_class = transports.DocumentServiceRestTransport
+
+    request_init = {}
+    request_init["resource"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped from the json request
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_acl._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["resource"] = 'resource_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_acl._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "resource" in jsonified_request
+    assert jsonified_request["resource"] == 'resource_value'
+
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = document_service.SetAclResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = document_service.SetAclResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.set_acl(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_set_acl_rest_unset_required_fields():
+    transport = transports.DocumentServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.set_acl._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("resource", "policy", )))
+
+
+def test_set_acl_rest_flattened():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = document_service.SetAclResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'resource': 'projects/sample1/locations/sample2/documents/sample3'}
+
+        # supply a truthy value for each flattened field
+        mock_args = dict(
+            resource='resource_value',
+            policy=policy_pb2.Policy(version=774),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = document_service.SetAclResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.set_acl(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{resource=projects/*/locations/*/documents/*}:setAcl" % client.transport._host, args[1])
+
+
+def test_set_acl_rest_flattened_error(transport: str = 'rest'):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Passing both a request object and flattened
+    # fields must raise ValueError.
+    with pytest.raises(ValueError):
+        client.set_acl(
+            document_service_request.SetAclRequest(),
+            resource='resource_value',
+            policy=policy_pb2.Policy(version=774),
+        )
+
+
+def test_credentials_transport_error():
+    # Providing both credentials and a transport instance is an error.
+    transport = transports.DocumentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DocumentServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.DocumentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DocumentServiceClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.DocumentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = DocumentServiceClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = DocumentServiceClient(
+            client_options=options,
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.DocumentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = DocumentServiceClient(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.DocumentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = DocumentServiceClient(transport=transport)
+    assert client.transport is transport  # the provided transport is adopted as-is
+
+def test_transport_get_channel():
+    # A transport instance exposes a usable gRPC channel.
+    transport = transports.DocumentServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.DocumentServiceGrpcAsyncIOTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+@pytest.mark.parametrize("transport_class", [
+    transports.DocumentServiceGrpcTransport,
+    transports.DocumentServiceGrpcAsyncIOTransport,
+    transports.DocumentServiceRestTransport,
+])
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()  # ADC consulted exactly once when no credentials given
+
+def test_transport_kind_grpc():
+    transport = DocumentServiceClient.get_transport_class("grpc")(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+    assert transport.kind == "grpc"  # kind reflects the requested transport
+
+
+def test_initialize_client_w_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    assert client is not None  # construction with an explicit grpc transport succeeds
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_document_empty_call_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.create_document),
+            '__call__') as call:
+        call.return_value = document_service.CreateDocumentResponse()
+        client.create_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.CreateDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_document_empty_call_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.get_document),
+            '__call__') as call:
+        call.return_value = gcc_document.Document()
+        client.get_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.GetDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_document_empty_call_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.update_document),
+            '__call__') as call:
+        call.return_value = document_service.UpdateDocumentResponse()
+        client.update_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.UpdateDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_document_empty_call_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        call.return_value = None
+        client.delete_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.DeleteDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_search_documents_empty_call_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.search_documents),
+            '__call__') as call:
+        call.return_value = document_service.SearchDocumentsResponse()
+        client.search_documents(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.SearchDocumentsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_lock_document_empty_call_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.lock_document),
+            '__call__') as call:
+        call.return_value = gcc_document.Document()
+        client.lock_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.LockDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_fetch_acl_empty_call_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.fetch_acl),
+            '__call__') as call:
+        call.return_value = document_service.FetchAclResponse()
+        client.fetch_acl(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.FetchAclRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_set_acl_empty_call_grpc():
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        call.return_value = document_service.SetAclResponse()
+        client.set_acl(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.SetAclRequest()
+
+        assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+    transport = DocumentServiceAsyncClient.get_transport_class("grpc_asyncio")(
+        credentials=async_anonymous_credentials()
+    )
+    assert transport.kind == "grpc_asyncio"  # kind reflects the requested transport
+
+
+def test_initialize_client_w_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    assert client is not None  # construction with an explicit grpc_asyncio transport succeeds
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_document_empty_call_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.create_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.CreateDocumentResponse(
+        ))
+        await client.create_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.CreateDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_document_empty_call_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.get_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document.Document(
+            name='name_value',
+            reference_id='reference_id_value',
+            display_name='display_name_value',
+            title='title_value',
+            display_uri='display_uri_value',
+            document_schema_name='document_schema_name_value',
+            structured_content_uri='structured_content_uri_value',
+            raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF,
+            async_enabled=True,
+            content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE,
+            text_extraction_disabled=True,
+            text_extraction_enabled=True,
+            creator='creator_value',
+            updater='updater_value',
+            legal_hold=True,
+        ))
+        await client.get_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.GetDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_document_empty_call_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.update_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.UpdateDocumentResponse(
+        ))
+        await client.update_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.UpdateDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_document_empty_call_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.delete_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.DeleteDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_search_documents_empty_call_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.search_documents),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.SearchDocumentsResponse(
+            next_page_token='next_page_token_value',
+            total_size=1086,
+            question_answer='question_answer_value',
+        ))
+        await client.search_documents(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.SearchDocumentsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_lock_document_empty_call_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.lock_document),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcc_document.Document(
+            name='name_value',
+            reference_id='reference_id_value',
+            display_name='display_name_value',
+            title='title_value',
+            display_uri='display_uri_value',
+            document_schema_name='document_schema_name_value',
+            structured_content_uri='structured_content_uri_value',
+            raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF,
+            async_enabled=True,
+            content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE,
+            text_extraction_disabled=True,
+            text_extraction_enabled=True,
+            creator='creator_value',
+            updater='updater_value',
+            legal_hold=True,
+        ))
+        await client.lock_document(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.LockDocumentRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_fetch_acl_empty_call_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.fetch_acl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.FetchAclResponse(
+        ))
+        await client.fetch_acl(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.FetchAclRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_set_acl_empty_call_grpc_asyncio():
+    client = DocumentServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the underlying gRPC call and fake its response.
+    with mock.patch.object(
+            type(client.transport.set_acl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document_service.SetAclResponse(
+        ))
+        await client.set_acl(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = document_service_request.SetAclRequest()
+
+        assert args[0] == request_msg
+
+
+def test_transport_kind_rest():
+    transport = DocumentServiceClient.get_transport_class("rest")(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+    assert transport.kind == "rest"  # kind reflects the requested transport
+
+
+def test_create_document_rest_bad_request(request_type=document_service_request.CreateDocumentRequest):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # build a request that satisfies the http transcoding rules
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        client.create_document(request)
+
+
+@pytest.mark.parametrize("request_type", [
+  document_service_request.CreateDocumentRequest,
+  dict,
+])
+def test_create_document_rest_call_success(request_type):
+    client = DocumentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # build a request that satisfies the http transcoding rules
+    request_init = {'parent': 'projects/sample1/locations/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = document_service.CreateDocumentResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = document_service.CreateDocumentResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.create_document(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, document_service.CreateDocumentResponse)
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_document_rest_interceptors(null_interceptor):
    """Verify the REST transport calls the pre/post create_document hooks once per RPC."""
    transport = transports.DocumentServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentServiceRestInterceptor(),
        )
    client = DocumentServiceClient(transport=transport)

    # Patch the HTTP session, URL transcoding, and both interceptor hooks.
    # The hooks are patched at class level, so they observe the call even
    # when the transport was built with interceptor=None.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "post_create_document") as post, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "pre_create_document") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        # Short-circuit URL transcoding with a canned result.
        pb_message = document_service_request.CreateDocumentRequest.pb(document_service_request.CreateDocumentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP round trip carrying a serialized response.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = document_service.CreateDocumentResponse.to_json(document_service.CreateDocumentResponse())
        req.return_value.content = return_value

        request = document_service_request.CreateDocumentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre hook must return the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = document_service.CreateDocumentResponse()

        client.create_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_document_rest_bad_request(request_type=document_service_request.GetDocumentRequest):
    """Verify get_document surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed dead local `json_return_value = ''` — it was never used.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.get_document(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.GetDocumentRequest,
  dict,
])
def test_get_document_rest_call_success(request_type):
    """get_document over REST round-trips every scalar Document field."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request body that satisfies the REST transcoding rules.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/documents/sample3'})

    # Patch the underlying HTTP session and return a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # The payload the fake server will return. Field order matches the
        # original test, so any oneof last-writer-wins semantics are preserved.
        expected = gcc_document.Document(
            name='name_value',
            reference_id='reference_id_value',
            display_name='display_name_value',
            title='title_value',
            display_uri='display_uri_value',
            document_schema_name='document_schema_name_value',
            structured_content_uri='structured_content_uri_value',
            raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF,
            async_enabled=True,
            content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE,
            text_extraction_disabled=True,
            text_extraction_enabled=True,
            creator='creator_value',
            updater='updater_value',
            legal_hold=True,
            plain_text='plain_text_value',
            raw_document_path='raw_document_path_value',
        )

        # Fake the wire response: JSON-serialized protobuf with a 200 status.
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = json_format.MessageToJson(
            gcc_document.Document.pb(expected)
        ).encode('UTF-8')
        req.return_value = http_response

        response = client.get_document(request)

    # Every scalar field should survive the serialize/deserialize round trip.
    assert isinstance(response, gcc_document.Document)
    assert response.name == 'name_value'
    assert response.reference_id == 'reference_id_value'
    assert response.display_name == 'display_name_value'
    assert response.title == 'title_value'
    assert response.display_uri == 'display_uri_value'
    assert response.document_schema_name == 'document_schema_name_value'
    assert response.structured_content_uri == 'structured_content_uri_value'
    assert response.raw_document_file_type == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF
    assert response.async_enabled is True
    assert response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE
    assert response.text_extraction_disabled is True
    assert response.text_extraction_enabled is True
    assert response.creator == 'creator_value'
    assert response.updater == 'updater_value'
    assert response.legal_hold is True
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_document_rest_interceptors(null_interceptor):
    """Verify the REST transport calls the pre/post get_document hooks once per RPC."""
    transport = transports.DocumentServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentServiceRestInterceptor(),
        )
    client = DocumentServiceClient(transport=transport)

    # Patch the HTTP session, URL transcoding, and both interceptor hooks.
    # Class-level patching means the hooks fire even with interceptor=None.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "post_get_document") as post, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "pre_get_document") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        # Short-circuit URL transcoding with a canned result.
        pb_message = document_service_request.GetDocumentRequest.pb(document_service_request.GetDocumentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP round trip carrying a serialized Document.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = gcc_document.Document.to_json(gcc_document.Document())
        req.return_value.content = return_value

        request = document_service_request.GetDocumentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre hook must return the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = gcc_document.Document()

        client.get_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_update_document_rest_bad_request(request_type=document_service_request.UpdateDocumentRequest):
    """Verify update_document surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed dead local `json_return_value = ''` — it was never used.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.update_document(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.UpdateDocumentRequest,
  dict,
])
def test_update_document_rest_call_success(request_type):
    """update_document over REST deserializes an UpdateDocumentResponse on HTTP 200."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request body that satisfies the REST transcoding rules.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/documents/sample3'})

    # Patch the underlying HTTP session and return a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        expected = document_service.UpdateDocumentResponse(
        )

        # Fake the wire response: JSON-serialized protobuf with a 200 status.
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = json_format.MessageToJson(
            document_service.UpdateDocumentResponse.pb(expected)
        ).encode('UTF-8')
        req.return_value = http_response

        response = client.update_document(request)

    # The client should deserialize back into the response type.
    assert isinstance(response, document_service.UpdateDocumentResponse)
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_document_rest_interceptors(null_interceptor):
    """Verify the REST transport calls the pre/post update_document hooks once per RPC."""
    transport = transports.DocumentServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentServiceRestInterceptor(),
        )
    client = DocumentServiceClient(transport=transport)

    # Patch the HTTP session, URL transcoding, and both interceptor hooks.
    # Class-level patching means the hooks fire even with interceptor=None.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "post_update_document") as post, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "pre_update_document") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        # Short-circuit URL transcoding with a canned result.
        pb_message = document_service_request.UpdateDocumentRequest.pb(document_service_request.UpdateDocumentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP round trip carrying a serialized response.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = document_service.UpdateDocumentResponse.to_json(document_service.UpdateDocumentResponse())
        req.return_value.content = return_value

        request = document_service_request.UpdateDocumentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre hook must return the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = document_service.UpdateDocumentResponse()

        client.update_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_delete_document_rest_bad_request(request_type=document_service_request.DeleteDocumentRequest):
    """Verify delete_document surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed dead local `json_return_value = ''` — it was never used.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.delete_document(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.DeleteDocumentRequest,
  dict,
])
def test_delete_document_rest_call_success(request_type):
    """delete_document over REST returns None on an empty 200 response."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request body that satisfies the REST transcoding rules.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/documents/sample3'})

    # Patch the underlying HTTP session and return an empty 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # An empty body is all the server sends back for a deletion.
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = ''.encode('UTF-8')
        req.return_value = http_response

        response = client.delete_document(request)

    # Deleting a document yields no payload.
    assert response is None
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_document_rest_interceptors(null_interceptor):
    """Verify the REST transport calls the pre_delete_document hook once per RPC.

    delete_document returns no payload, so there is no post hook to check.
    """
    transport = transports.DocumentServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentServiceRestInterceptor(),
        )
    client = DocumentServiceClient(transport=transport)

    # Patch the HTTP session, URL transcoding, and the pre hook.
    # Class-level patching means the hook fires even with interceptor=None.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "pre_delete_document") as pre:
        pre.assert_not_called()
        # Short-circuit URL transcoding with a canned result.
        pb_message = document_service_request.DeleteDocumentRequest.pb(document_service_request.DeleteDocumentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful (empty-bodied) HTTP round trip.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200

        request = document_service_request.DeleteDocumentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre hook must return the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata

        client.delete_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
+
+
def test_search_documents_rest_bad_request(request_type=document_service_request.SearchDocumentsRequest):
    """Verify search_documents surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed dead local `json_return_value = ''` — it was never used.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.search_documents(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.SearchDocumentsRequest,
  dict,
])
def test_search_documents_rest_call_success(request_type):
    """search_documents over REST returns a pager exposing the response fields."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request body that satisfies the REST transcoding rules.
    request = request_type(**{'parent': 'projects/sample1/locations/sample2'})

    # Patch the underlying HTTP session and return a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # The payload the fake server will return.
        expected = document_service.SearchDocumentsResponse(
            next_page_token='next_page_token_value',
            total_size=1086,
            question_answer='question_answer_value',
        )

        # Fake the wire response: JSON-serialized protobuf with a 200 status.
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = json_format.MessageToJson(
            document_service.SearchDocumentsResponse.pb(expected)
        ).encode('UTF-8')
        req.return_value = http_response

        response = client.search_documents(request)

    # The client wraps the raw response in a pager; fields pass through.
    assert isinstance(response, pagers.SearchDocumentsPager)
    assert response.next_page_token == 'next_page_token_value'
    assert response.total_size == 1086
    assert response.question_answer == 'question_answer_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_search_documents_rest_interceptors(null_interceptor):
    """Verify the REST transport calls the pre/post search_documents hooks once per RPC."""
    transport = transports.DocumentServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentServiceRestInterceptor(),
        )
    client = DocumentServiceClient(transport=transport)

    # Patch the HTTP session, URL transcoding, and both interceptor hooks.
    # Class-level patching means the hooks fire even with interceptor=None.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "post_search_documents") as post, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "pre_search_documents") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        # Short-circuit URL transcoding with a canned result.
        pb_message = document_service_request.SearchDocumentsRequest.pb(document_service_request.SearchDocumentsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP round trip carrying a serialized response.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = document_service.SearchDocumentsResponse.to_json(document_service.SearchDocumentsResponse())
        req.return_value.content = return_value

        request = document_service_request.SearchDocumentsRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre hook must return the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = document_service.SearchDocumentsResponse()

        client.search_documents(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_lock_document_rest_bad_request(request_type=document_service_request.LockDocumentRequest):
    """Verify lock_document surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed dead local `json_return_value = ''` — it was never used.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.lock_document(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.LockDocumentRequest,
  dict,
])
def test_lock_document_rest_call_success(request_type):
    """lock_document over REST round-trips every scalar Document field."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request body that satisfies the REST transcoding rules.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/documents/sample3'})

    # Patch the underlying HTTP session and return a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # The payload the fake server will return. Field order matches the
        # original test, so any oneof last-writer-wins semantics are preserved.
        expected = gcc_document.Document(
            name='name_value',
            reference_id='reference_id_value',
            display_name='display_name_value',
            title='title_value',
            display_uri='display_uri_value',
            document_schema_name='document_schema_name_value',
            structured_content_uri='structured_content_uri_value',
            raw_document_file_type=gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF,
            async_enabled=True,
            content_category=gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE,
            text_extraction_disabled=True,
            text_extraction_enabled=True,
            creator='creator_value',
            updater='updater_value',
            legal_hold=True,
            plain_text='plain_text_value',
            raw_document_path='raw_document_path_value',
        )

        # Fake the wire response: JSON-serialized protobuf with a 200 status.
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = json_format.MessageToJson(
            gcc_document.Document.pb(expected)
        ).encode('UTF-8')
        req.return_value = http_response

        response = client.lock_document(request)

    # Every scalar field should survive the serialize/deserialize round trip.
    assert isinstance(response, gcc_document.Document)
    assert response.name == 'name_value'
    assert response.reference_id == 'reference_id_value'
    assert response.display_name == 'display_name_value'
    assert response.title == 'title_value'
    assert response.display_uri == 'display_uri_value'
    assert response.document_schema_name == 'document_schema_name_value'
    assert response.structured_content_uri == 'structured_content_uri_value'
    assert response.raw_document_file_type == gcc_document.RawDocumentFileType.RAW_DOCUMENT_FILE_TYPE_PDF
    assert response.async_enabled is True
    assert response.content_category == gcc_document.ContentCategory.CONTENT_CATEGORY_IMAGE
    assert response.text_extraction_disabled is True
    assert response.text_extraction_enabled is True
    assert response.creator == 'creator_value'
    assert response.updater == 'updater_value'
    assert response.legal_hold is True
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_lock_document_rest_interceptors(null_interceptor):
    """Verify the REST transport calls the pre/post lock_document hooks once per RPC."""
    transport = transports.DocumentServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentServiceRestInterceptor(),
        )
    client = DocumentServiceClient(transport=transport)

    # Patch the HTTP session, URL transcoding, and both interceptor hooks.
    # Class-level patching means the hooks fire even with interceptor=None.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "post_lock_document") as post, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "pre_lock_document") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        # Short-circuit URL transcoding with a canned result.
        pb_message = document_service_request.LockDocumentRequest.pb(document_service_request.LockDocumentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP round trip carrying a serialized Document.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = gcc_document.Document.to_json(gcc_document.Document())
        req.return_value.content = return_value

        request = document_service_request.LockDocumentRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre hook must return the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = gcc_document.Document()

        client.lock_document(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_fetch_acl_rest_bad_request(request_type=document_service_request.FetchAclRequest):
    """Verify fetch_acl surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'resource': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed dead local `json_return_value = ''` — it was never used.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.fetch_acl(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.FetchAclRequest,
  dict,
])
def test_fetch_acl_rest_call_success(request_type):
    """fetch_acl over REST deserializes a FetchAclResponse on HTTP 200."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request body that satisfies the REST transcoding rules.
    request = request_type(**{'resource': 'projects/sample1/locations/sample2/documents/sample3'})

    # Patch the underlying HTTP session and return a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        expected = document_service.FetchAclResponse(
        )

        # Fake the wire response: JSON-serialized protobuf with a 200 status.
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = json_format.MessageToJson(
            document_service.FetchAclResponse.pb(expected)
        ).encode('UTF-8')
        req.return_value = http_response

        response = client.fetch_acl(request)

    # The client should deserialize back into the response type.
    assert isinstance(response, document_service.FetchAclResponse)
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_fetch_acl_rest_interceptors(null_interceptor):
    """Verify the REST transport calls the pre/post fetch_acl hooks once per RPC."""
    transport = transports.DocumentServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentServiceRestInterceptor(),
        )
    client = DocumentServiceClient(transport=transport)

    # Patch the HTTP session, URL transcoding, and both interceptor hooks.
    # Class-level patching means the hooks fire even with interceptor=None.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "post_fetch_acl") as post, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "pre_fetch_acl") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        # Short-circuit URL transcoding with a canned result.
        pb_message = document_service_request.FetchAclRequest.pb(document_service_request.FetchAclRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP round trip carrying a serialized response.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = document_service.FetchAclResponse.to_json(document_service.FetchAclResponse())
        req.return_value.content = return_value

        request = document_service_request.FetchAclRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        # The pre hook must return the (request, metadata) pair the transport uses.
        pre.return_value = request, metadata
        post.return_value = document_service.FetchAclResponse()

        client.fetch_acl(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_set_acl_rest_bad_request(request_type=document_service_request.SetAclRequest):
    """Verify set_acl surfaces an HTTP 400 as core_exceptions.BadRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'resource': 'projects/sample1/locations/sample2/documents/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (Removed dead local `json_return_value = ''` — it was never used.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.set_acl(request)
+
+
@pytest.mark.parametrize("request_type", [
  document_service_request.SetAclRequest,
  dict,
])
def test_set_acl_rest_call_success(request_type):
    """set_acl over REST deserializes a SetAclResponse on HTTP 200."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request body that satisfies the REST transcoding rules.
    request = request_type(**{'resource': 'projects/sample1/locations/sample2/documents/sample3'})

    # Patch the underlying HTTP session and return a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        expected = document_service.SetAclResponse(
        )

        # Fake the wire response: JSON-serialized protobuf with a 200 status.
        http_response = mock.Mock()
        http_response.status_code = 200
        http_response.content = json_format.MessageToJson(
            document_service.SetAclResponse.pb(expected)
        ).encode('UTF-8')
        req.return_value = http_response

        response = client.set_acl(request)

    # The client should deserialize back into the response type.
    assert isinstance(response, document_service.SetAclResponse)
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_set_acl_rest_interceptors(null_interceptor):
    """Verify pre_set_acl/post_set_acl interceptor hooks each run exactly once per call."""
    transport = transports.DocumentServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.DocumentServiceRestInterceptor(),
        )
    client = DocumentServiceClient(transport=transport)

    # Patch the HTTP session, transcoding, and both interceptor hooks so the
    # call never leaves the process.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "post_set_acl") as post, \
        mock.patch.object(transports.DocumentServiceRestInterceptor, "pre_set_acl") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = document_service_request.SetAclRequest.pb(document_service_request.SetAclRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = document_service.SetAclResponse.to_json(document_service.SetAclResponse())
        req.return_value.content = return_value

        request = document_service_request.SetAclRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = document_service.SetAclResponse()

        client.set_acl(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
    """Verify get_operation raises BadRequest when the REST call returns HTTP 400."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_operation(request)
+
+
@pytest.mark.parametrize("request_type", [
    operations_pb2.GetOperationRequest,
    dict,
])
def test_get_operation_rest(request_type):
    """Verify a successful get_operation REST call returns an Operation."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation()

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')

        req.return_value = response_value

        response = client.get_operation(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.Operation)
+
def test_initialize_client_w_rest():
    """Constructing a client with the REST transport should succeed."""
    rest_client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    assert rest_client is not None
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_create_document_empty_call_rest():
    """Verify create_document(request=None) sends a default CreateDocumentRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.create_document),
            '__call__') as call:
        client.create_document(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = document_service_request.CreateDocumentRequest()

        assert args[0] == request_msg
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_get_document_empty_call_rest():
    """Verify get_document(request=None) sends a default GetDocumentRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.get_document),
            '__call__') as call:
        client.get_document(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = document_service_request.GetDocumentRequest()

        assert args[0] == request_msg
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_update_document_empty_call_rest():
    """Verify update_document(request=None) sends a default UpdateDocumentRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.update_document),
            '__call__') as call:
        client.update_document(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = document_service_request.UpdateDocumentRequest()

        assert args[0] == request_msg
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_delete_document_empty_call_rest():
    """Verify delete_document(request=None) sends a default DeleteDocumentRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_document),
            '__call__') as call:
        client.delete_document(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = document_service_request.DeleteDocumentRequest()

        assert args[0] == request_msg
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_search_documents_empty_call_rest():
    """Verify search_documents(request=None) sends a default SearchDocumentsRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.search_documents),
            '__call__') as call:
        client.search_documents(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = document_service_request.SearchDocumentsRequest()

        assert args[0] == request_msg
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_lock_document_empty_call_rest():
    """Verify lock_document(request=None) sends a default LockDocumentRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.lock_document),
            '__call__') as call:
        client.lock_document(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = document_service_request.LockDocumentRequest()

        assert args[0] == request_msg
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_fetch_acl_empty_call_rest():
    """Verify fetch_acl(request=None) sends a default FetchAclRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.fetch_acl),
            '__call__') as call:
        client.fetch_acl(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = document_service_request.FetchAclRequest()

        assert args[0] == request_msg
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_set_acl_empty_call_rest():
    """Verify set_acl(request=None) sends a default SetAclRequest."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
            type(client.transport.set_acl),
            '__call__') as call:
        client.set_acl(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = document_service_request.SetAclRequest()

        assert args[0] == request_msg
+
+
def test_transport_grpc_default():
    """With no explicit transport, the client should default to gRPC."""
    default_client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    assert isinstance(default_client.transport, transports.DocumentServiceGrpcTransport)
+
def test_document_service_base_transport_error():
    """Supplying both a credentials object and a credentials file must be rejected."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.DocumentServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
+
+
def test_document_service_base_transport():
    """Every abstract method and property on the base transport raises NotImplementedError."""
    # Instantiate the base transport with its __init__ stubbed out so no real
    # auth or channel setup happens.
    with mock.patch('google.cloud.contentwarehouse_v1.services.document_service.transports.DocumentServiceTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.DocumentServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Each RPC stub on the base class must blindly raise NotImplementedError.
    rpc_names = (
        'create_document',
        'get_document',
        'update_document',
        'delete_document',
        'search_documents',
        'lock_document',
        'fetch_acl',
        'set_acl',
        'get_operation',
    )
    for rpc_name in rpc_names:
        with pytest.raises(NotImplementedError):
            getattr(transport, rpc_name)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch-all for the remaining abstract properties.
    for prop_name in ('kind',):
        with pytest.raises(NotImplementedError):
            getattr(transport, prop_name)()
+
+
def test_document_service_base_transport_with_credentials_file():
    """Verify a credentials file is loaded with the default cloud-platform scope."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.contentwarehouse_v1.services.document_service.transports.DocumentServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.DocumentServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id="octopus",
        )
+
+
def test_document_service_base_transport_with_adc():
    """Verify ADC is consulted when neither credentials nor a credentials file is given."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.contentwarehouse_v1.services.document_service.transports.DocumentServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.DocumentServiceTransport()
        adc.assert_called_once()
+
+
def test_document_service_auth_adc():
    """Verify the client falls back to ADC with the default cloud-platform scope."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        DocumentServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id=None,
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentServiceGrpcTransport,
        transports.DocumentServiceGrpcAsyncIOTransport,
    ],
)
def test_document_service_transport_auth_adc(transport_class):
    """Verify gRPC transports forward scopes and quota project to ADC."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=(                'https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DocumentServiceGrpcTransport,
        transports.DocumentServiceGrpcAsyncIOTransport,
        transports.DocumentServiceRestTransport,
    ],
)
def test_document_service_transport_auth_gdch_credentials(transport_class):
    """Verify GDC-H credentials get the API audience applied (host used when audience is None)."""
    host = 'https://language.com'
    api_audience_tests = [None, 'https://language2.com']
    api_audience_expect = [host, 'https://language2.com']
    for t, e in zip(api_audience_tests, api_audience_expect):
        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
            gdch_mock = mock.MagicMock()
            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
            adc.return_value = (gdch_mock, None)
            transport_class(host=host, api_audience=t)
            gdch_mock.with_gdch_audience.assert_called_once_with(
                e
            )
+
+
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.DocumentServiceGrpcTransport, grpc_helpers),
        (transports.DocumentServiceGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_document_service_transport_create_channel(transport_class, grpc_helpers):
    """Verify create_channel receives the expected host, scopes and channel options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=["1", "2"],
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("transport_class", [transports.DocumentServiceGrpcTransport, transports.DocumentServiceGrpcAsyncIOTransport])
def test_document_service_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """Verify mTLS setup: explicit ssl_channel_credentials wins; otherwise client_cert_source is used."""
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )
+
def test_document_service_http_transport_client_cert_source_for_mtls():
    """The REST transport should wire client_cert_source into mTLS channel configuration."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.DocumentServiceRestTransport(
            credentials=creds,
            client_cert_source_for_mtls=client_cert_source_callback,
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_document_service_host_no_port(transport_name):
    """Without an explicit port, gRPC hosts gain :443 and REST hosts become https URLs."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com'),
        transport=transport_name,
    )
    if transport_name in ('grpc', 'grpc_asyncio'):
        expected_host = 'contentwarehouse.googleapis.com:443'
    else:
        expected_host = 'https://contentwarehouse.googleapis.com'
    assert client.transport._host == expected_host
+
@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_document_service_host_with_port(transport_name):
    """An explicit port in the endpoint is preserved on both gRPC and REST hosts."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com:8000'),
        transport=transport_name,
    )
    if transport_name in ('grpc', 'grpc_asyncio'):
        expected_host = 'contentwarehouse.googleapis.com:8000'
    else:
        expected_host = 'https://contentwarehouse.googleapis.com:8000'
    assert client.transport._host == expected_host
+
@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_document_service_client_transport_session_collision(transport_name):
    """Two separately-constructed clients must not share REST sessions for any RPC."""
    client1 = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    client2 = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    # Compare the per-method sessions pairwise across the two clients.
    for method_name in (
        "create_document",
        "get_document",
        "update_document",
        "delete_document",
        "search_documents",
        "lock_document",
        "fetch_acl",
        "set_acl",
    ):
        session1 = getattr(client1.transport, method_name)._session
        session2 = getattr(client2.transport, method_name)._session
        assert session1 != session2
def test_document_service_grpc_transport_channel():
    """A caller-supplied channel is adopted as-is, the host gains :443, and no
    SSL channel credentials are recorded."""
    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.DocumentServiceGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Identity comparison with None per PEP 8 (fixes flake8 E711 `== None`).
    assert transport._ssl_channel_credentials is None
+
+
def test_document_service_grpc_asyncio_transport_channel():
    """A caller-supplied asyncio channel is adopted as-is, the host gains :443,
    and no SSL channel credentials are recorded."""
    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.DocumentServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Identity comparison with None per PEP 8 (fixes flake8 E711 `== None`).
    assert transport._ssl_channel_credentials is None
+
+
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.DocumentServiceGrpcTransport, transports.DocumentServiceGrpcAsyncIOTransport])
def test_document_service_transport_channel_mtls_with_client_cert_source(
    transport_class
):
    """Verify the deprecated api_mtls_endpoint/client_cert_source args still build an mTLS channel (with a DeprecationWarning)."""
    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, 'default') as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()

            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.DocumentServiceGrpcTransport, transports.DocumentServiceGrpcAsyncIOTransport])
def test_document_service_transport_channel_mtls_with_adc(
    transport_class
):
    """Verify api_mtls_endpoint with client_cert_source=None falls back to ADC SslCredentials (with a DeprecationWarning)."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
+
+
def test_document_path():
    """document_path assembles the canonical document resource name."""
    actual = DocumentServiceClient.document_path("squid", "clam", "whelk")
    assert actual == "projects/squid/locations/clam/documents/whelk"
+
+
def test_parse_document_path():
    """parse_document_path is the inverse of document_path (round-trip check)."""
    expected = {
        "project": "octopus",
        "location": "oyster",
        "document": "nudibranch",
    }
    path = DocumentServiceClient.document_path(**expected)
    assert DocumentServiceClient.parse_document_path(path) == expected
+
def test_document_schema_path():
    """document_schema_path assembles the documentSchemas resource name."""
    actual = DocumentServiceClient.document_schema_path("cuttlefish", "mussel", "winkle")
    assert actual == "projects/cuttlefish/locations/mussel/documentSchemas/winkle"
+
+
def test_parse_document_schema_path():
    """parse_document_schema_path is the inverse of document_schema_path (round-trip check)."""
    expected = {
        "project": "nautilus",
        "location": "scallop",
        "document_schema": "abalone",
    }
    path = DocumentServiceClient.document_schema_path(**expected)
    assert DocumentServiceClient.parse_document_schema_path(path) == expected
+
def test_location_path():
    """location_path assembles the locations resource name."""
    actual = DocumentServiceClient.location_path("squid", "clam")
    assert actual == "projects/squid/locations/clam"
+
+
def test_parse_location_path():
    """parse_location_path is the inverse of location_path (round-trip check)."""
    expected = {
        "project": "whelk",
        "location": "octopus",
    }
    path = DocumentServiceClient.location_path(**expected)
    assert DocumentServiceClient.parse_location_path(path) == expected
+
def test_common_billing_account_path():
    """common_billing_account_path assembles the billingAccounts resource name."""
    actual = DocumentServiceClient.common_billing_account_path("oyster")
    assert actual == "billingAccounts/oyster"
+
+
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path inverts common_billing_account_path (round-trip check)."""
    expected = {
        "billing_account": "nudibranch",
    }
    path = DocumentServiceClient.common_billing_account_path(**expected)
    assert DocumentServiceClient.parse_common_billing_account_path(path) == expected
+
def test_common_folder_path():
    """common_folder_path assembles the folders resource name."""
    actual = DocumentServiceClient.common_folder_path("cuttlefish")
    assert actual == "folders/cuttlefish"
+
+
def test_parse_common_folder_path():
    """parse_common_folder_path inverts common_folder_path (round-trip check)."""
    expected = {
        "folder": "mussel",
    }
    path = DocumentServiceClient.common_folder_path(**expected)
    assert DocumentServiceClient.parse_common_folder_path(path) == expected
+
def test_common_organization_path():
    """common_organization_path assembles the organizations resource name."""
    actual = DocumentServiceClient.common_organization_path("winkle")
    assert actual == "organizations/winkle"
+
+
def test_parse_common_organization_path():
    """parse_common_organization_path inverts common_organization_path (round-trip check)."""
    expected = {
        "organization": "nautilus",
    }
    path = DocumentServiceClient.common_organization_path(**expected)
    assert DocumentServiceClient.parse_common_organization_path(path) == expected
+
def test_common_project_path():
    """Verify common_project_path() formats the resource name."""
    project = "scallop"
    assert DocumentServiceClient.common_project_path(project) == f"projects/{project}"
+
+
def test_parse_common_project_path():
    """Verify parse_common_project_path() inverts the builder."""
    segments = {
        "project": "abalone",
    }
    # Build a path from known segments, then parse it back out.
    path = DocumentServiceClient.common_project_path(**segments)
    assert DocumentServiceClient.parse_common_project_path(path) == segments
+
def test_common_location_path():
    """Verify common_location_path() formats the resource name."""
    project = "squid"
    location = "clam"
    actual = DocumentServiceClient.common_location_path(project, location)
    assert actual == f"projects/{project}/locations/{location}"
+
+
def test_parse_common_location_path():
    """Verify parse_common_location_path() inverts the builder."""
    segments = {
        "project": "whelk",
        "location": "octopus",
    }
    # Build a path from known segments, then parse it back out.
    path = DocumentServiceClient.common_location_path(**segments)
    assert DocumentServiceClient.parse_common_location_path(path) == segments
+
+
def test_client_with_default_client_info():
    """Both the client and a bare transport must forward client_info to
    _prep_wrapped_messages()."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Constructing a client should prep wrapped messages exactly once.
    with mock.patch.object(transports.DocumentServiceTransport, '_prep_wrapped_messages') as prep:
        DocumentServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Constructing the transport directly should behave the same way.
    with mock.patch.object(transports.DocumentServiceTransport, '_prep_wrapped_messages') as prep:
        transport_class = DocumentServiceClient.get_transport_class()
        transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
+
+
def test_get_operation(transport: str = "grpc"):
    """get_operation() must forward the request to the gRPC stub and
    return an Operation."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime, and the stub is mocked,
    # so an empty request suffices.
    request = operations_pb2.GetOperationRequest()

    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = operations_pb2.Operation()
        response = client.get_operation(request)
        # Exactly one stub invocation, carrying our request.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # The mocked Operation must be returned unchanged in type.
    assert isinstance(response, operations_pb2.Operation)
@pytest.mark.asyncio
async def test_get_operation_async(transport: str = "grpc_asyncio"):
    """Async variant: get_operation() must forward the request to the
    async gRPC stub and return an Operation."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(), transport=transport,
    )
    # Proto3 fields are all optional at runtime, and the stub is mocked,
    # so an empty request suffices.
    request = operations_pb2.GetOperationRequest()

    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        response = await client.get_operation(request)
        # Exactly one stub invocation, carrying our request.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # The mocked Operation must be returned unchanged in type.
    assert isinstance(response, operations_pb2.Operation)
+
def test_get_operation_field_headers():
    """The request's `name` must be propagated as an
    x-goog-request-params routing header."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any URI-bound field must be echoed back as a field header, so give
    # it a non-empty value.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = operations_pb2.Operation()
        client.get_operation(request)
        # Exactly one stub invocation, carrying our request.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # The routing header must mention the resource name.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
    """Async variant: the request's `name` must be propagated as an
    x-goog-request-params routing header."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )
    # Any URI-bound field must be echoed back as a field header, so give
    # it a non-empty value.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        await client.get_operation(request)
        # Exactly one stub invocation, carrying our request.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # The routing header must mention the resource name.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
def test_get_operation_from_dict():
    """get_operation() must accept a plain dict in place of a request proto."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = operations_pb2.Operation()
        # Pass the request as a dict; the client converts it internally.
        client.get_operation(request={"name": "locations"})
        call.assert_called()
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
    """Async variant: get_operation() must accept a plain dict request."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        # Pass the request as a dict; the client converts it internally.
        await client.get_operation(request={"name": "locations"})
        call.assert_called()
+
+
def test_transport_close_grpc():
    """Exiting the client context manager must close the gRPC channel."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    with mock.patch.object(type(client.transport._grpc_channel), "close") as close:
        with client:
            close.assert_not_called()
        close.assert_called_once()
+
+
@pytest.mark.asyncio
async def test_transport_close_grpc_asyncio():
    """Exiting the async client context manager must close the async
    gRPC channel."""
    client = DocumentServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )
    with mock.patch.object(type(client.transport._grpc_channel), "close") as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()
+
+
def test_transport_close_rest():
    """Exiting the client context manager must close the REST session."""
    client = DocumentServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    with mock.patch.object(type(client.transport._session), "close") as close:
        with client:
            close.assert_not_called()
        close.assert_called_once()
+
+
def test_client_ctx():
    """The client context manager must close its transport on exit, for
    every supported transport."""
    for transport in ("rest", "grpc"):
        client = DocumentServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )
        # The client should delegate close() to the underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
+
@pytest.mark.parametrize("client_class,transport_class", [
    (DocumentServiceClient, transports.DocumentServiceGrpcTransport),
    (DocumentServiceAsyncClient, transports.DocumentServiceGrpcAsyncIOTransport),
])
def test_api_key_credentials(client_class, transport_class):
    """An `api_key` client option must be exchanged via
    google.auth._default.get_api_key_credentials and passed to the
    transport in place of regular credentials."""
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # The transport must receive the exchanged API-key credentials
            # and the default (non-mTLS) endpoint for the default universe.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py
new file mode 100644
index 000000000000..fb5796915d8c
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py
@@ -0,0 +1,2408 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation
+from google.api_core import operation_async  # type: ignore
+from google.api_core import operations_v1
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.contentwarehouse_v1.services.pipeline_service import PipelineServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.pipeline_service import PipelineServiceClient
+from google.cloud.contentwarehouse_v1.services.pipeline_service import transports
+from google.cloud.contentwarehouse_v1.types import common
+from google.cloud.contentwarehouse_v1.types import pipeline_service
+from google.cloud.contentwarehouse_v1.types import pipelines
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+from google.type import expr_pb2  # type: ignore
+import google.auth
+
+
async def mock_async_gen(data, chunk_size=1):
    """Yield *data* as UTF-8 encoded chunks of at most *chunk_size* characters.

    Used to fake an async streaming response body in transport tests.

    Args:
        data: The string to stream.
        chunk_size: Maximum characters per yielded chunk.

    Yields:
        bytes: successive non-overlapping UTF-8 encoded slices of *data*.
    """
    # Step by chunk_size so chunks do not overlap; the previous version
    # stepped by 1, which yielded overlapping chunks for chunk_size > 1
    # (behavior is unchanged for the default chunk_size=1).
    for start in range(0, len(data), chunk_size):  # pragma: NO COVER
        yield data[start : start + chunk_size].encode("utf-8")
+
def client_cert_source_callback():
    """Return a dummy (certificate, key) byte pair for mTLS tests."""
    cert, key = b"cert bytes", b"key bytes"
    return cert, key
+
# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
def async_anonymous_credentials():
    """Return anonymous credentials, preferring the async flavor when
    google.auth.aio is importable."""
    if not HAS_GOOGLE_AUTH_AIO:
        return ga_credentials.AnonymousCredentials()
    return ga_credentials_async.AnonymousCredentials()
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a non-localhost endpoint so mTLS derivation can be exercised."""
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
+
# If default endpoint template is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint template so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint_template(client):
    """Return a non-localhost endpoint template so mTLS derivation can be
    exercised."""
    if "localhost" in client._DEFAULT_ENDPOINT_TEMPLATE:
        return "test.{UNIVERSE_DOMAIN}"
    return client._DEFAULT_ENDPOINT_TEMPLATE
+
+
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint() maps *.googleapis.com hosts onto the
    mtls. subdomain and leaves non-Google hosts (and None) alone."""
    cases = {
        None: None,
        "example.googleapis.com": "example.mtls.googleapis.com",
        "example.mtls.googleapis.com": "example.mtls.googleapis.com",
        "example.sandbox.googleapis.com": "example.mtls.sandbox.googleapis.com",
        "example.mtls.sandbox.googleapis.com": "example.mtls.sandbox.googleapis.com",
        "api.example.com": "api.example.com",
    }
    for endpoint, expected in cases.items():
        assert PipelineServiceClient._get_default_mtls_endpoint(endpoint) == expected
+
def test__read_environment_variables():
    """_read_environment_variables() returns a
    (use_client_cert, mtls_endpoint_mode, universe_domain) triple derived
    from GOOGLE_API_USE_CLIENT_CERTIFICATE, GOOGLE_API_USE_MTLS_ENDPOINT
    and GOOGLE_CLOUD_UNIVERSE_DOMAIN."""
    # Defaults when no environment variables are set.
    assert PipelineServiceClient._read_environment_variables() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        assert PipelineServiceClient._read_environment_variables() == (True, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        assert PipelineServiceClient._read_environment_variables() == (False, "auto", None)

    # Anything other than "true"/"false" must be rejected with ValueError.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            PipelineServiceClient._read_environment_variables()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        assert PipelineServiceClient._read_environment_variables() == (False, "never", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        assert PipelineServiceClient._read_environment_variables() == (False, "always", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
        assert PipelineServiceClient._read_environment_variables() == (False, "auto", None)

    # Only "never"/"auto"/"always" are accepted for the mTLS endpoint mode.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            PipelineServiceClient._read_environment_variables()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # The universe domain env var is passed through as the third element.
    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
        assert PipelineServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
def test__get_client_cert_source():
    """A provided cert source wins when cert usage is enabled; with none
    provided, the platform default mTLS source is used."""
    provided_source = mock.Mock()
    default_source = mock.Mock()

    # Certificates disabled: never return a source.
    assert PipelineServiceClient._get_client_cert_source(None, False) is None
    assert PipelineServiceClient._get_client_cert_source(provided_source, False) is None
    # Enabled and provided: the provided source is returned.
    assert PipelineServiceClient._get_client_cert_source(provided_source, True) == provided_source

    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=default_source):
            # Enabled with nothing provided: fall back to the default source.
            assert PipelineServiceClient._get_client_cert_source(None, True) is default_source
            assert PipelineServiceClient._get_client_cert_source(provided_source, "true") is provided_source
+
@mock.patch.object(PipelineServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PipelineServiceClient))
@mock.patch.object(PipelineServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PipelineServiceAsyncClient))
def test__get_api_endpoint():
    """_get_api_endpoint() resolves, in priority order: an explicit
    override, the mTLS endpoint (when forced or auto-selected with a
    client cert), then the universe-specific default endpoint."""
    api_override = "foo.com"
    mock_client_cert_source = mock.Mock()
    default_universe = PipelineServiceClient._DEFAULT_UNIVERSE
    default_endpoint = PipelineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = PipelineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    # An explicit override always wins.
    assert PipelineServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
    # "auto" selects mTLS only when a client cert source is present.
    assert PipelineServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == PipelineServiceClient.DEFAULT_MTLS_ENDPOINT
    assert PipelineServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
    # "always" forces mTLS whether or not a client cert source exists.
    assert PipelineServiceClient._get_api_endpoint(None, None, default_universe, "always") == PipelineServiceClient.DEFAULT_MTLS_ENDPOINT
    assert PipelineServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == PipelineServiceClient.DEFAULT_MTLS_ENDPOINT
    # "never" yields the plain endpoint for whichever universe is in use.
    assert PipelineServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
    assert PipelineServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint

    # mTLS is only supported in the default (googleapis.com) universe.
    with pytest.raises(MutualTLSChannelError) as excinfo:
        PipelineServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
def test__get_universe_domain():
    """A client-supplied universe domain beats the env var; both beat the
    default; an empty string is rejected."""
    from_client = "foo.com"
    from_env = "bar.com"

    assert PipelineServiceClient._get_universe_domain(from_client, from_env) == from_client
    assert PipelineServiceClient._get_universe_domain(None, from_env) == from_env
    assert PipelineServiceClient._get_universe_domain(None, None) == PipelineServiceClient._DEFAULT_UNIVERSE

    # Empty string is invalid and must raise.
    with pytest.raises(ValueError) as excinfo:
        PipelineServiceClient._get_universe_domain("", None)
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc"),
    (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest"),
])
def test__validate_universe_domain(client_class, transport_class, transport_name):
    """_validate_universe_domain() succeeds when client and credential
    universes agree (or credentials are absent) and raises ValueError on
    a mismatch.

    Fix: replaced non-idiomatic `== True` comparisons (flake8 E712) with
    `is True`; the method returns a bool, so behavior is unchanged.
    """
    client = client_class(
        transport=transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
    )
    assert client._validate_universe_domain() is True

    # Test the case when universe is already validated.
    assert client._validate_universe_domain() is True

    if transport_name == "grpc":
        # Test the case where credentials are provided by the
        # `local_channel_credentials`. The default universes in both match.
        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
        client = client_class(transport=transport_class(channel=channel))
        assert client._validate_universe_domain() is True

        # Test the case where credentials do not exist: e.g. a transport is provided
        # with no credentials. Validation should still succeed because there is no
        # mismatch with non-existent credentials.
        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
        transport = transport_class(channel=channel)
        transport._credentials = None
        client = client_class(transport=transport)
        assert client._validate_universe_domain() is True

    # TODO: This is needed to cater for older versions of google-auth
    # Make this test unconditional once the minimum supported version of
    # google-auth becomes 2.23.0 or higher.
    google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]]
    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
        credentials = ga_credentials.AnonymousCredentials()
        credentials._universe_domain = "foo.com"
        # Test the case when there is a universe mismatch from the credentials.
        client = client_class(
            transport=transport_class(credentials=credentials)
        )
        with pytest.raises(ValueError) as excinfo:
            client._validate_universe_domain()
        assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."

        # Test the case when there is a universe mismatch from the client.
        #
        # TODO: Make this test unconditional once the minimum supported version of
        # google-api-core becomes 2.15.0 or higher.
        api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]]
        if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
            client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),))
            with pytest.raises(ValueError) as excinfo:
                client._validate_universe_domain()
            assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."

    # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
    with pytest.raises(ValueError):
        client._compare_universes("foo.bar", None)
+
+
@pytest.mark.parametrize("client_class,transport_name", [
    (PipelineServiceClient, "grpc"),
    (PipelineServiceAsyncClient, "grpc_asyncio"),
    (PipelineServiceClient, "rest"),
])
def test_pipeline_service_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info() must hand the parsed info to the
    credentials factory and wire the result into the transport."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_info({"valid": True}, transport=transport_name)
        assert isinstance(client, client_class)
        assert client.transport._credentials == creds

        # gRPC hosts carry a port; the REST host carries a scheme.
        if transport_name in ('grpc', 'grpc_asyncio'):
            assert client.transport._host == 'contentwarehouse.googleapis.com:443'
        else:
            assert client.transport._host == 'https://contentwarehouse.googleapis.com'
+
+
@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.PipelineServiceGrpcTransport, "grpc"),
    (transports.PipelineServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (transports.PipelineServiceRestTransport, "rest"),
])
def test_pipeline_service_client_service_account_always_use_jwt(transport_class, transport_name):
    """A self-signed JWT must be requested from service-account
    credentials only when always_use_jwt_access=True."""
    for always_use_jwt in (True, False):
        with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
            creds = service_account.Credentials(None, None, None)
            transport_class(credentials=creds, always_use_jwt_access=always_use_jwt)
            if always_use_jwt:
                use_jwt.assert_called_once_with(True)
            else:
                use_jwt.assert_not_called()
+
+
@pytest.mark.parametrize("client_class,transport_name", [
    (PipelineServiceClient, "grpc"),
    (PipelineServiceAsyncClient, "grpc_asyncio"),
    (PipelineServiceClient, "rest"),
])
def test_pipeline_service_client_from_service_account_file(client_class, transport_name):
    """Both file-based constructors (from_service_account_file and its
    json alias) must route through the credentials factory."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        for constructor in (client_class.from_service_account_file,
                            client_class.from_service_account_json):
            client = constructor("dummy/file/path.json", transport=transport_name)
            assert isinstance(client, client_class)
            assert client.transport._credentials == creds

        # gRPC hosts carry a port; the REST host carries a scheme.
        if transport_name in ('grpc', 'grpc_asyncio'):
            assert client.transport._host == 'contentwarehouse.googleapis.com:443'
        else:
            assert client.transport._host == 'https://contentwarehouse.googleapis.com'
+
+
def test_pipeline_service_client_get_transport_class():
    """get_transport_class() defaults to one of the available transports
    and honors an explicit transport label."""
    assert PipelineServiceClient.get_transport_class() in (
        transports.PipelineServiceGrpcTransport,
        transports.PipelineServiceRestTransport,
    )
    # An explicit label must select exactly that transport class.
    assert PipelineServiceClient.get_transport_class("grpc") is transports.PipelineServiceGrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc"),
+    (PipelineServiceAsyncClient, transports.PipelineServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest"),
+])
+@mock.patch.object(PipelineServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PipelineServiceClient))
+@mock.patch.object(PipelineServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PipelineServiceAsyncClient))
+def test_pipeline_service_client_client_options(client_class, transport_class, transport_name):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(PipelineServiceClient, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(PipelineServiceClient, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_endpoint is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc", "true"),
    (PipelineServiceAsyncClient, transports.PipelineServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc", "false"),
    (PipelineServiceAsyncClient, transports.PipelineServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
    (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest", "true"),
    (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest", "false"),
])
@mock.patch.object(PipelineServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PipelineServiceClient))
@mock.patch.object(PipelineServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PipelineServiceAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_pipeline_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """Verify mTLS endpoint auto-switching with GOOGLE_API_USE_MTLS_ENDPOINT="auto".

    Covers three scenarios, each under GOOGLE_API_USE_CLIENT_CERTIFICATE set to
    the parametrized value: (1) a client cert supplied via ClientOptions,
    (2) a client cert discovered via ADC, (3) no client cert available at all.
    The transport __init__ is mocked so only the arguments the client passes to
    it are asserted; no real channel is created.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            # Cert is only honored when the env var opts in; otherwise the
            # default (non-mTLS) endpoint and no cert source are expected.
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    if use_client_cert_env == "false":
                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    # With no cert anywhere, the non-mTLS default endpoint must be used.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )
+
+
@pytest.mark.parametrize("client_class", [
    PipelineServiceClient, PipelineServiceAsyncClient
])
@mock.patch.object(PipelineServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PipelineServiceClient))
@mock.patch.object(PipelineServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PipelineServiceAsyncClient))
def test_pipeline_service_client_get_mtls_endpoint_and_cert_source(client_class):
    """Exercise the classmethod get_mtls_endpoint_and_cert_source.

    Walks every combination of GOOGLE_API_USE_CLIENT_CERTIFICATE and
    GOOGLE_API_USE_MTLS_ENDPOINT, including the two unsupported-value error
    paths, asserting the (endpoint, cert_source) pair returned for each.
    """
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    # The supplied cert source must be ignored when certs are disabled.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None

    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client_class.get_mtls_endpoint_and_cert_source()

        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
@pytest.mark.parametrize("client_class", [
    PipelineServiceClient, PipelineServiceAsyncClient
])
@mock.patch.object(PipelineServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PipelineServiceClient))
@mock.patch.object(PipelineServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PipelineServiceAsyncClient))
def test_pipeline_service_client_client_api_endpoint(client_class):
    """Verify the resolved client.api_endpoint across endpoint-selection inputs.

    Checks precedence of an explicit ClientOptions.api_endpoint, the
    GOOGLE_API_USE_MTLS_ENDPOINT env values "never"/"always", and a
    universe_domain override — including older ClientOptions versions that
    lack the universe_domain attribute (probed via hasattr below).
    """
    mock_client_cert_source = client_cert_source_callback
    api_override = "foo.com"
    default_universe = PipelineServiceClient._DEFAULT_UNIVERSE
    default_endpoint = PipelineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = PipelineServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
    # use ClientOptions.api_endpoint as the api endpoint regardless.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
            assert client.api_endpoint == api_override

    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint

    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        client = client_class(credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT

    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
    # and ClientOptions.universe_domain="bar.com",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
    # The hasattr probe keeps this test working with ClientOptions versions
    # that predate universe_domain support.
    options = client_options.ClientOptions()
    universe_exists = hasattr(options, "universe_domain")
    if universe_exists:
        options = client_options.ClientOptions(universe_domain=mock_universe)
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    else:
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
    assert client.universe_domain == (mock_universe if universe_exists else default_universe)

    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
    options = client_options.ClientOptions()
    if hasattr(options, "universe_domain"):
        delattr(options, "universe_domain")
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
        assert client.api_endpoint == default_endpoint
+
+
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc"),
    (PipelineServiceAsyncClient, transports.PipelineServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest"),
])
def test_pipeline_service_client_client_options_scopes(client_class, transport_class, transport_name):
    """Scopes given via ClientOptions must be forwarded verbatim to the transport."""
    scoped_options = client_options.ClientOptions(scopes=["1", "2"])
    with mock.patch.object(transport_class, '__init__') as mocked_init:
        mocked_init.return_value = None
        client = client_class(client_options=scoped_options, transport=transport_name)
        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
            UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
        # The transport must receive the caller's scopes and default everything else.
        mocked_init.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=expected_host,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc", grpc_helpers),
    (PipelineServiceAsyncClient, transports.PipelineServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
    (PipelineServiceClient, transports.PipelineServiceRestTransport, "rest", None),
])
def test_pipeline_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """A credentials file named in ClientOptions must be handed to the transport."""
    file_options = client_options.ClientOptions(credentials_file="credentials.json")

    with mock.patch.object(transport_class, '__init__') as mocked_init:
        mocked_init.return_value = None
        client = client_class(client_options=file_options, transport=transport_name)
        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
            UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
        # Only credentials_file should differ from the defaults.
        mocked_init.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=expected_host,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
+
def test_pipeline_service_client_client_options_from_dict():
    """ClientOptions may be supplied as a plain dict; api_endpoint must flow through."""
    transport_init = ('google.cloud.contentwarehouse_v1.services.pipeline_service.'
                      'transports.PipelineServiceGrpcTransport.__init__')
    with mock.patch(transport_init) as grpc_transport:
        grpc_transport.return_value = None
        PipelineServiceClient(client_options={'api_endpoint': 'squid.clam.whelk'})
        # The dict's endpoint becomes the transport host; all else stays default.
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
+
+
@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
    (PipelineServiceClient, transports.PipelineServiceGrpcTransport, "grpc", grpc_helpers),
    (PipelineServiceAsyncClient, transports.PipelineServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
])
def test_pipeline_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
    """Verify that credentials loaded from a file reach channel creation.

    Part 1 mocks the transport __init__ to assert the credentials_file argument
    is forwarded; part 2 lets the real transport run with create_channel mocked
    to assert the file-loaded credentials (not ADC's) are used for the channel.
    """
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )

    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # test that the credentials from file are saved and used as the credentials.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel"
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        file_creds = ga_credentials.AnonymousCredentials()
        load_creds.return_value = (file_creds, None)
        adc.return_value = (creds, None)
        client = client_class(client_options=options, transport=transport_name)
        # file_creds (not the ADC creds) must win, proving the file was honored.
        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=file_creds,
            credentials_file=None,
            quota_project_id=None,
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=None,
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("request_type", [
  pipeline_service.RunPipelineRequest,
  dict,
])
def test_run_pipeline(request_type, transport: str = 'grpc'):
    """RunPipeline over gRPC: the stub is invoked once and the Operation
    response surfaces as a long-running future."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # The API itself is mocked out, so an empty request suffices
    # (every proto3 field is optional at runtime).
    request = request_type()

    stub_type = type(client.transport.run_pipeline)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.run_pipeline(request)

        # Exactly one RPC was issued, carrying an (empty) RunPipelineRequest.
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == pipeline_service.RunPipelineRequest()

    # Operation responses are wrapped in a long-running-operation future.
    assert isinstance(response, future.Future)
+
+
def test_run_pipeline_non_empty_request_with_auto_populated_field():
    """Coverage failsafe for AIP-4235 UUID4 auto-population with a
    non-empty request: explicitly-set fields must survive unchanged."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Set every non-UUID4 string field; UUID4 fields (if any) are left for
    # the client to populate automatically per AIP 4235.
    outgoing = pipeline_service.RunPipelineRequest(name='name_value')

    stub_type = type(client.transport.run_pipeline)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.run_pipeline(request=outgoing)
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        expected = pipeline_service.RunPipelineRequest(name='name_value')
        assert call_args[0] == expected
+
def test_run_pipeline_use_cached_wrapped_rpc():
    """Verify run_pipeline reuses the wrapped RPC cached at client creation.

    wrap_method is mocked so its call count reveals whether a new wrapper is
    built per call (it must not be); the cached entry is swapped for a Mock to
    count invocations of the underlying stub.
    """
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = PipelineServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.run_pipeline in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.run_pipeline] = mock_rpc
        request = {}
        client.run_pipeline(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        # Operation methods call wrapper_fn to build a cached
        # client._transport.operations_client instance on first rpc call.
        # Subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        client.run_pipeline(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_run_pipeline_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async variant: run_pipeline must reuse the wrapped RPC cached at
    client creation rather than rebuilding a wrapper per call.

    Mirrors the sync test but patches the async wrap_method and swaps the
    cached entry for an AsyncMock to count stub invocations.
    """
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = PipelineServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.run_pipeline in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.run_pipeline] = mock_rpc

        request = {}
        await client.run_pipeline(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        # Operation methods call wrapper_fn to build a cached
        # client._transport.operations_client instance on first rpc call.
        # Subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        await client.run_pipeline(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_run_pipeline_async(transport: str = 'grpc_asyncio', request_type=pipeline_service.RunPipelineRequest):
    """Async RunPipeline: the stub is invoked and the Operation response
    surfaces as a long-running future."""
    client = PipelineServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # The API itself is mocked out, so an empty request suffices
    # (every proto3 field is optional at runtime).
    request = request_type()

    stub_type = type(client.transport.run_pipeline)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.run_pipeline(request)

        # The stub was invoked with an (empty) RunPipelineRequest.
        assert len(call.mock_calls)
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == pipeline_service.RunPipelineRequest()

    # Operation responses are wrapped in a long-running-operation future.
    assert isinstance(response, future.Future)
+
+
@pytest.mark.asyncio
async def test_run_pipeline_async_from_dict():
    # Re-run the async test with a plain dict request to cover dict coercion.
    await test_run_pipeline_async(request_type=dict)
+
def test_run_pipeline_field_headers():
    """The resource name from the request must be echoed in the
    x-goog-request-params routing header."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Values that form part of the HTTP/1.1 URI travel as field headers;
    # make them non-empty so the header has something to carry.
    request = pipeline_service.RunPipelineRequest()
    request.name = 'name_value'

    stub_type = type(client.transport.run_pipeline)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.run_pipeline(request)

        # One RPC, carrying the original request.
        assert len(call.mock_calls) == 1
        _, call_args, call_kwargs = call.mock_calls[0]
        assert call_args[0] == request

        # The routing header must name the resource.
        assert (
            'x-goog-request-params',
            'name=name_value',
        ) in call_kwargs['metadata']
+
+
@pytest.mark.asyncio
async def test_run_pipeline_field_headers_async():
    """Async: the resource name must be echoed in the
    x-goog-request-params routing header."""
    client = PipelineServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # URI-bound values travel as field headers; make them non-empty.
    request = pipeline_service.RunPipelineRequest()
    request.name = 'name_value'

    stub_type = type(client.transport.run_pipeline)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.run_pipeline(request)

        # The stub was invoked with the original request.
        assert len(call.mock_calls)
        _, call_args, call_kwargs = call.mock_calls[0]
        assert call_args[0] == request

        # The routing header must name the resource.
        assert (
            'x-goog-request-params',
            'name=name_value',
        ) in call_kwargs['metadata']
+
+
def test_run_pipeline_flattened():
    """Flattened keyword arguments must be folded into the request message."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    stub_type = type(client.transport.run_pipeline)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        # Invoke via keyword (flattened) arguments only — no request object.
        client.run_pipeline(name='name_value')

        # A single request was sent, carrying the flattened value.
        assert len(call.mock_calls) == 1
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0].name == 'name_value'
+
+
def test_run_pipeline_flattened_error():
    """Mixing a request object with flattened fields is rejected."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Supplying both a request object and flattened keyword fields must raise.
    with pytest.raises(ValueError):
        client.run_pipeline(
            pipeline_service.RunPipelineRequest(),
            name='name_value',
        )
+
@pytest.mark.asyncio
async def test_run_pipeline_flattened_async():
    """Async variant: a flattened ``name`` kwarg is copied into the request.

    Fix: the generated code assigned a plain ``Operation`` to
    ``call.return_value`` and immediately overwrote it with the awaitable
    fake; the dead assignment is removed.
    """
    client = PipelineServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.run_pipeline),
            '__call__') as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.run_pipeline(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_run_pipeline_flattened_error_async():
    """Async variant: request object plus flattened fields must fail."""
    client = PipelineServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Supplying both a request message and flattened kwargs is an error.
    with pytest.raises(ValueError):
        await client.run_pipeline(
            pipeline_service.RunPipelineRequest(),
            name='name_value',
        )
+
+
def test_run_pipeline_rest_use_cached_wrapped_rpc():
    """REST run_pipeline must reuse the wrapped RPC cached at client creation,
    not re-wrap the method on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = PipelineServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.run_pipeline in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.run_pipeline] = mock_rpc

        request = {}
        client.run_pipeline(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        # Operation methods build a cached wrapper on first rpc call
        # subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        client.run_pipeline(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
+
def test_run_pipeline_rest_required_fields(request_type=pipeline_service.RunPipelineRequest):
    """The REST transport must treat ``name`` as required: dropped when left
    at its default, preserved once set, and forwarded with the standard
    ``$alt`` query parameter on the wire."""
    transport_class = transports.PipelineServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_pipeline._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).run_pipeline._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = operations_pb2.Operation(name='operations/spam')
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.run_pipeline(request)

            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_run_pipeline_rest_unset_required_fields():
    """The REST transport must report ``name`` as run_pipeline's required field.

    Fix: the generated code passed the ``AnonymousCredentials`` class instead
    of an instance; instantiate it, consistent with every other call in this
    file.
    """
    transport = transports.PipelineServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.run_pipeline._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
def test_run_pipeline_rest_flattened():
    """REST: a flattened ``name`` kwarg must transcode onto the
    ``v1/{name=projects/*/locations/*}:runPipeline`` URI."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # get arguments that satisfy an http rule for this method
        sample_request = {'name': 'projects/sample1/locations/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            name='name_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.run_pipeline(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{name=projects/*/locations/*}:runPipeline" % client.transport._host, args[1])
+
+
def test_run_pipeline_rest_flattened_error(transport: str = 'rest'):
    """REST variant: request object plus flattened fields must fail."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both a request message and flattened kwargs is an error.
    with pytest.raises(ValueError):
        client.run_pipeline(
            pipeline_service.RunPipelineRequest(),
            name='name_value',
        )
+
+
def test_credentials_transport_error():
    """A transport instance is mutually exclusive with credentials,
    credentials_file, api_key, and scopes; api_key also conflicts with
    explicit credentials."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.PipelineServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = PipelineServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.PipelineServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = PipelineServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide an api_key and a transport instance.
    transport = transports.PipelineServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = PipelineServiceClient(
            client_options=options,
            transport=transport,
        )

    # It is an error to provide an api_key and a credential.
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = PipelineServiceClient(
            client_options=options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.PipelineServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = PipelineServiceClient(
            client_options={"scopes": ["1", "2"]},
            transport=transport,
        )
+
+
def test_transport_instance():
    """A pre-built transport instance handed to the client is used as-is."""
    custom_transport = transports.PipelineServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert PipelineServiceClient(transport=custom_transport).transport is custom_transport
+
def test_transport_get_channel():
    """Both gRPC transports expose a usable channel when built directly."""
    for transport_cls in (
        transports.PipelineServiceGrpcTransport,
        transports.PipelineServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
+
@pytest.mark.parametrize("transport_class", [
    transports.PipelineServiceGrpcTransport,
    transports.PipelineServiceGrpcAsyncIOTransport,
    transports.PipelineServiceRestTransport,
])
def test_transport_adc(transport_class):
    """Transports fall back to application-default credentials (ADC)."""
    with mock.patch.object(google.auth, 'default') as mock_default:
        mock_default.return_value = (ga_credentials.AnonymousCredentials(), None)
        # Constructing with no explicit credentials must consult ADC exactly once.
        transport_class()
        mock_default.assert_called_once()
+
def test_transport_kind_grpc():
    """The synchronous gRPC transport reports kind == "grpc"."""
    transport_cls = PipelineServiceClient.get_transport_class("grpc")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "grpc"
+
+
def test_initialize_client_w_grpc():
    """A client can be constructed with the gRPC transport selected by name."""
    assert PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    ) is not None
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_run_pipeline_empty_call_grpc():
    """run_pipeline(request=None) must send a default RunPipelineRequest."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Patch the stub so no real RPC is issued.
    with mock.patch.object(
            type(client.transport.run_pipeline),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.run_pipeline(request=None)

        # The stub must have been invoked with an empty default request.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == pipeline_service.RunPipelineRequest()
+
+
def test_transport_kind_grpc_asyncio():
    """The asyncio gRPC transport reports kind == "grpc_asyncio"."""
    transport_cls = PipelineServiceAsyncClient.get_transport_class("grpc_asyncio")
    transport = transport_cls(credentials=async_anonymous_credentials())
    assert transport.kind == "grpc_asyncio"
+
+
def test_initialize_client_w_grpc_asyncio():
    """An async client can be constructed with the grpc_asyncio transport."""
    assert PipelineServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    ) is not None
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_run_pipeline_empty_call_grpc_asyncio():
    """Async: run_pipeline(request=None) must send a default RunPipelineRequest."""
    client = PipelineServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Patch the stub so no real RPC is issued; return an awaitable fake.
    with mock.patch.object(
            type(client.transport.run_pipeline),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        await client.run_pipeline(request=None)

        # The stub must have been invoked with an empty default request.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == pipeline_service.RunPipelineRequest()
+
+
def test_transport_kind_rest():
    """The REST transport reports kind == "rest"."""
    transport_cls = PipelineServiceClient.get_transport_class("rest")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "rest"
+
+
def test_run_pipeline_rest_bad_request(request_type=pipeline_service.RunPipelineRequest):
    """An HTTP 400 response from the REST transport must surface as
    ``core_exceptions.BadRequest``."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.run_pipeline(request)
+
+
@pytest.mark.parametrize("request_type", [
  pipeline_service.RunPipelineRequest,
  dict,
])
def test_run_pipeline_rest_call_success(request_type):
    """A 200 REST response must be consumed without error, whether the
    request is given as a proto message or a plain dict."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.run_pipeline(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): this test ends without asserting on ``response``; the
    # recomputed json_return_value is unused. It only verifies the call
    # completes — consider adding an assertion. TODO confirm intent.
    json_return_value = json_format.MessageToJson(return_value)
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_run_pipeline_rest_interceptors(null_interceptor):
    """The REST interceptor's pre_/post_run_pipeline hooks must each run
    exactly once around a call, with or without a configured interceptor."""
    transport = transports.PipelineServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.PipelineServiceRestInterceptor(),
        )
    client = PipelineServiceClient(transport=transport)

    # Stack patches: HTTP session, URI transcoding, LRO result handling,
    # and the two interceptor hooks under test.
    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
        mock.patch.object(transports.PipelineServiceRestInterceptor, "post_run_pipeline") as post, \
        mock.patch.object(transports.PipelineServiceRestInterceptor, "pre_run_pipeline") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = pipeline_service.RunPipelineRequest.pb(pipeline_service.RunPipelineRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = pipeline_service.RunPipelineRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.run_pipeline(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
    """An HTTP 400 on the mixin get_operation call must raise
    ``core_exceptions.BadRequest``."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_operation(request)
+
+
@pytest.mark.parametrize("request_type", [
    operations_pb2.GetOperationRequest,
    dict,
])
def test_get_operation_rest(request_type):
    """get_operation over REST must parse a 200 response into an Operation."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation()

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')

        req.return_value = response_value

        response = client.get_operation(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, operations_pb2.Operation)
+
def test_initialize_client_w_rest():
    """A client can be constructed with the REST transport selected by name."""
    assert PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    ) is not None
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_run_pipeline_empty_call_rest():
    """REST: run_pipeline(request=None) must send a default RunPipelineRequest."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Patch the transport method so no HTTP request is issued.
    with mock.patch.object(
            type(client.transport.run_pipeline),
            '__call__') as call:
        client.run_pipeline(request=None)

        # The transport must have been invoked with an empty default request.
        call.assert_called()
        _, call_args, _ = call.mock_calls[0]
        assert call_args[0] == pipeline_service.RunPipelineRequest()
+
+
def test_pipeline_service_rest_lro_client():
    """The REST transport lazily builds and caches an LRO operations client."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    transport = client.transport

    # The property must yield an api-core abstract operations client.
    assert isinstance(
        transport.operations_client,
        operations_v1.AbstractOperationsClient,
    )

    # Repeated access must return the exact same cached object.
    assert transport.operations_client is transport.operations_client
+
def test_transport_grpc_default():
    """With no transport argument the client must default to gRPC."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(client.transport, transports.PipelineServiceGrpcTransport)
+
def test_pipeline_service_base_transport_error():
    """Supplying both a credentials object and a credentials file must raise."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.PipelineServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
+
+
def test_pipeline_service_base_transport():
    """Every abstract method and property on the base transport must raise
    NotImplementedError until a concrete transport overrides it."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.contentwarehouse_v1.services.pipeline_service.transports.PipelineServiceTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.PipelineServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'run_pipeline',
        'get_operation',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            getattr(transport, r)()
+
+
def test_pipeline_service_base_transport_with_credentials_file():
    """A credentials_file must be loaded via google.auth with the service's
    default cloud-platform scope and the given quota project."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.contentwarehouse_v1.services.pipeline_service.transports.PipelineServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.PipelineServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id="octopus",
        )
+
+
def test_pipeline_service_base_transport_with_adc():
    """With neither credentials nor a file, the base transport consults ADC."""
    prep_patch = mock.patch('google.cloud.contentwarehouse_v1.services.pipeline_service.transports.PipelineServiceTransport._prep_wrapped_messages')
    with mock.patch.object(google.auth, 'default', autospec=True) as mock_default, prep_patch as prep:
        prep.return_value = None
        mock_default.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.PipelineServiceTransport()
        mock_default.assert_called_once()
+
+
def test_pipeline_service_auth_adc():
    """Constructing a client with no credentials must call ADC with the
    service's default cloud-platform scope and no quota project."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        PipelineServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id=None,
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.PipelineServiceGrpcTransport,
        transports.PipelineServiceGrpcAsyncIOTransport,
    ],
)
def test_pipeline_service_transport_auth_adc(transport_class):
    """gRPC transports must forward scopes and quota project to the ADC lookup."""
    with mock.patch.object(google.auth, 'default', autospec=True) as mock_default:
        mock_default.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        mock_default.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.PipelineServiceGrpcTransport,
        transports.PipelineServiceGrpcAsyncIOTransport,
        transports.PipelineServiceRestTransport,
    ],
)
def test_pipeline_service_transport_auth_gdch_credentials(transport_class):
    """GDC-H credentials must be re-audienced: to api_audience when given,
    otherwise to the transport host."""
    host = 'https://language.com'
    api_audience_tests = [None, 'https://language2.com']
    api_audience_expect = [host, 'https://language2.com']
    for t, e in zip(api_audience_tests, api_audience_expect):
        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
            gdch_mock = mock.MagicMock()
            # with_gdch_audience returns the same mock so the transport keeps it.
            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
            adc.return_value = (gdch_mock, None)
            transport_class(host=host, api_audience=t)
            gdch_mock.with_gdch_audience.assert_called_once_with(
                e
            )
+
+
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.PipelineServiceGrpcTransport, grpc_helpers),
        (transports.PipelineServiceGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_pipeline_service_transport_create_channel(transport_class, grpc_helpers):
    """Channel creation must target the default host with ADC credentials,
    forwarded scopes/quota project, and unlimited message sizes."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=["1", "2"],
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("transport_class", [transports.PipelineServiceGrpcTransport, transports.PipelineServiceGrpcAsyncIOTransport])
def test_pipeline_service_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """mTLS setup: explicit ssl_channel_credentials win; otherwise a
    client_cert_source callback is turned into grpc ssl credentials."""
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )
+
def test_pipeline_service_http_transport_client_cert_source_for_mtls():
    """The REST transport must wire a client cert source into its mTLS session."""
    cred = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.PipelineServiceRestTransport(
            credentials=cred,
            client_cert_source_for_mtls=client_cert_source_callback,
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_pipeline_service_host_no_port(transport_name):
    """An endpoint without a port resolves to :443 (gRPC) or an https URL (REST)."""
    client = PipelineServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com'),
        transport=transport_name,
    )
    if transport_name in ('grpc', 'grpc_asyncio'):
        expected_host = 'contentwarehouse.googleapis.com:443'
    else:
        expected_host = 'https://contentwarehouse.googleapis.com'
    assert client.transport._host == expected_host
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_pipeline_service_host_with_port(transport_name):
+    client = PipelineServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'contentwarehouse.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://contentwarehouse.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_pipeline_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = PipelineServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = PipelineServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.run_pipeline._session
+    session2 = client2.transport.run_pipeline._session
+    assert session1 != session2
+def test_pipeline_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.PipelineServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+def test_pipeline_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.PipelineServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.PipelineServiceGrpcTransport, transports.PipelineServiceGrpcAsyncIOTransport])
+def test_pipeline_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    """Deprecated api_mtls_endpoint/client_cert_source args still build an mTLS channel.
+
+    The (cert, key) pair from the callback must be passed to
+    grpc.ssl_channel_credentials, and the resulting credentials must be used
+    when the channel to the mTLS endpoint is created.
+    """
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            # The deprecated arguments must still work, but warn the caller.
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            # The byte pair below is what client_cert_source_callback returns.
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.PipelineServiceGrpcTransport, transports.PipelineServiceGrpcAsyncIOTransport])
+def test_pipeline_service_transport_channel_mtls_with_adc(
+    transport_class
+):
+    """With api_mtls_endpoint but no cert source, SSL credentials come from ADC.
+
+    google.auth.transport.grpc.SslCredentials is patched so the transport picks
+    up the mocked default SSL credentials when creating the mTLS channel.
+    """
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            # api_mtls_endpoint is deprecated but still honored, with a warning.
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_pipeline_service_grpc_lro_client():
+    client = PipelineServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+    transport = client.transport
+
+    # Ensure that we have a api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_pipeline_service_grpc_lro_async_client():
+    client = PipelineServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc_asyncio',
+    )
+    transport = client.transport
+
+    # Ensure that we have a api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_cloud_function_path():
+    project = "squid"
+    location = "clam"
+    function = "whelk"
+    expected = "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, )
+    actual = PipelineServiceClient.cloud_function_path(project, location, function)
+    assert expected == actual
+
+
+def test_parse_cloud_function_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "function": "nudibranch",
+    }
+    path = PipelineServiceClient.cloud_function_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PipelineServiceClient.parse_cloud_function_path(path)
+    assert expected == actual
+
+def test_location_path():
+    project = "cuttlefish"
+    location = "mussel"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = PipelineServiceClient.location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_location_path():
+    expected = {
+        "project": "winkle",
+        "location": "nautilus",
+    }
+    path = PipelineServiceClient.location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PipelineServiceClient.parse_location_path(path)
+    assert expected == actual
+
+def test_common_billing_account_path():
+    billing_account = "scallop"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = PipelineServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "abalone",
+    }
+    path = PipelineServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PipelineServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    folder = "squid"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = PipelineServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "clam",
+    }
+    path = PipelineServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PipelineServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "whelk"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = PipelineServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "octopus",
+    }
+    path = PipelineServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PipelineServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "oyster"
+    expected = "projects/{project}".format(project=project, )
+    actual = PipelineServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "nudibranch",
+    }
+    path = PipelineServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PipelineServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "cuttlefish"
+    location = "mussel"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = PipelineServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "winkle",
+        "location": "nautilus",
+    }
+    path = PipelineServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PipelineServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    """A custom ClientInfo is forwarded to the transport's _prep_wrapped_messages."""
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.PipelineServiceTransport, '_prep_wrapped_messages') as prep:
+        client = PipelineServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    # The same contract must hold when the transport is constructed directly.
+    with mock.patch.object(transports.PipelineServiceTransport, '_prep_wrapped_messages') as prep:
+        transport_class = PipelineServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_get_operation(transport: str = "grpc"):
+    """get_operation forwards the request to the gRPC stub and returns an Operation."""
+    client = PipelineServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+@pytest.mark.asyncio
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
+    """Async get_operation forwards the request to the stub and returns an Operation."""
+    client = PipelineServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+def test_get_operation_field_headers():
+    """get_operation sends the request's name as an x-goog-request-params header."""
+    client = PipelineServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_operation_field_headers_async():
+    """Async get_operation sends the request's name as an x-goog-request-params header."""
+    client = PipelineServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_get_operation_from_dict():
+    client = PipelineServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+
+        response = client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_operation_from_dict_async():
+    client = PipelineServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = PipelineServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = PipelineServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    client = PipelineServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = PipelineServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+@pytest.mark.parametrize("client_class,transport_class", [
+    (PipelineServiceClient, transports.PipelineServiceGrpcTransport),
+    (PipelineServiceAsyncClient, transports.PipelineServiceGrpcAsyncIOTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+    """An api_key in client options is exchanged for credentials passed to the transport."""
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            # The transport must receive the API-key-derived credentials,
+            # not a credentials_file or ADC.
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py
new file mode 100644
index 000000000000..3d09a46d90b0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py
@@ -0,0 +1,5333 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.contentwarehouse_v1.services.rule_set_service import RuleSetServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.rule_set_service import RuleSetServiceClient
+from google.cloud.contentwarehouse_v1.services.rule_set_service import pagers
+from google.cloud.contentwarehouse_v1.services.rule_set_service import transports
+from google.cloud.contentwarehouse_v1.types import rule_engine
+from google.cloud.contentwarehouse_v1.types import ruleset_service_request
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+from google.type import expr_pb2  # type: ignore
+import google.auth
+
+
+async def mock_async_gen(data, chunk_size=1):
+    for i in range(0, len(data)):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
+
+
+def test__get_default_mtls_endpoint():
+    """_get_default_mtls_endpoint maps *.googleapis.com hosts to *.mtls.* and leaves others alone."""
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert RuleSetServiceClient._get_default_mtls_endpoint(None) is None
+    assert RuleSetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert RuleSetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert RuleSetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert RuleSetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert RuleSetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+def test__read_environment_variables():
+    """_read_environment_variables returns (use_client_cert, mtls_endpoint_mode, universe)
+    from the GOOGLE_API_USE_CLIENT_CERTIFICATE, GOOGLE_API_USE_MTLS_ENDPOINT and
+    GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables, rejecting invalid values."""
+    assert RuleSetServiceClient._read_environment_variables() == (False, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        assert RuleSetServiceClient._read_environment_variables() == (True, "auto", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        assert RuleSetServiceClient._read_environment_variables() == (False, "auto", None)
+
+    # Anything other than "true"/"false" must raise ValueError.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            RuleSetServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        assert RuleSetServiceClient._read_environment_variables() == (False, "never", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        assert RuleSetServiceClient._read_environment_variables() == (False, "always", None)
+
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+        assert RuleSetServiceClient._read_environment_variables() == (False, "auto", None)
+
+    # Anything other than "never"/"auto"/"always" must raise MutualTLSChannelError.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            RuleSetServiceClient._read_environment_variables()
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+        assert RuleSetServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
+def test__get_client_cert_source():
+    """_get_client_cert_source honors the use_cert flag and falls back to the default source."""
+    mock_provided_cert_source = mock.Mock()
+    mock_default_cert_source = mock.Mock()
+
+    assert RuleSetServiceClient._get_client_cert_source(None, False) is None
+    assert RuleSetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None
+    assert RuleSetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
+
+    # When no source is provided but a default exists, the default is used;
+    # a provided source always wins over the default.
+    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
+            assert RuleSetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source
+            assert RuleSetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
+
+@mock.patch.object(RuleSetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuleSetServiceClient))
+@mock.patch.object(RuleSetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuleSetServiceAsyncClient))
+def test__get_api_endpoint():
+    """_get_api_endpoint resolves the endpoint from override, cert source, universe and mTLS mode."""
+    api_override = "foo.com"
+    mock_client_cert_source = mock.Mock()
+    default_universe = RuleSetServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = RuleSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = RuleSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    # An explicit override always wins; otherwise the mTLS mode and the
+    # presence of a client cert source decide between the mTLS and plain endpoint.
+    assert RuleSetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
+    assert RuleSetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == RuleSetServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert RuleSetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
+    assert RuleSetServiceClient._get_api_endpoint(None, None, default_universe, "always") == RuleSetServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert RuleSetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == RuleSetServiceClient.DEFAULT_MTLS_ENDPOINT
+    assert RuleSetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
+    assert RuleSetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
+
+    # mTLS is only supported in the default (googleapis.com) universe.
+    with pytest.raises(MutualTLSChannelError) as excinfo:
+        RuleSetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
+    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
+def test__get_universe_domain():
+    """_get_universe_domain prefers the client setting, then the env var, then the default."""
+    client_universe_domain = "foo.com"
+    universe_domain_env = "bar.com"
+
+    assert RuleSetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
+    assert RuleSetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
+    assert RuleSetServiceClient._get_universe_domain(None, None) == RuleSetServiceClient._DEFAULT_UNIVERSE
+
+    # An empty string is rejected explicitly rather than treated as "unset".
+    with pytest.raises(ValueError) as excinfo:
+        RuleSetServiceClient._get_universe_domain("", None)
+    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport, "grpc"),
+    (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest"),
+])
+def test__validate_universe_domain(client_class, transport_class, transport_name):
+    client = client_class(
+        transport=transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+    )
+    assert client._validate_universe_domain() == True
+
+    # Test the case when universe is already validated.
+    assert client._validate_universe_domain() == True
+
+    if transport_name == "grpc":
+        # Test the case where credentials are provided via
+        # `local_channel_credentials`. The default universes of both match.
+        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+        client = client_class(transport=transport_class(channel=channel))
+        assert client._validate_universe_domain() == True
+
+        # Test the case where credentials do not exist: e.g. a transport is provided
+        # with no credentials. Validation should still succeed because there is no
+        # mismatch with non-existent credentials.
+        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+        transport=transport_class(channel=channel)
+        transport._credentials = None
+        client = client_class(transport=transport)
+        assert client._validate_universe_domain() == True
+
+    # TODO: This is needed to cater for older versions of google-auth
+    # Make this test unconditional once the minimum supported version of
+    # google-auth becomes 2.23.0 or higher.
+    google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]]
+    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
+        credentials = ga_credentials.AnonymousCredentials()
+        credentials._universe_domain = "foo.com"
+        # Test the case when there is a universe mismatch from the credentials.
+        client = client_class(
+            transport=transport_class(credentials=credentials)
+        )
+        with pytest.raises(ValueError) as excinfo:
+            client._validate_universe_domain()
+        assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+
+        # Test the case when there is a universe mismatch from the client.
+        #
+        # TODO: Make this test unconditional once the minimum supported version of
+        # google-api-core becomes 2.15.0 or higher.
+        api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]]
+        if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
+            client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),))
+            with pytest.raises(ValueError) as excinfo:
+                client._validate_universe_domain()
+            assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."
+
+    # Test that ValueError is raised when a universe domain is compared against non-existent (None) credentials
+    with pytest.raises(ValueError):
+        client._compare_universes("foo.bar", None)
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (RuleSetServiceClient, "grpc"),
+    (RuleSetServiceAsyncClient, "grpc_asyncio"),
+    (RuleSetServiceClient, "rest"),
+])
+def test_rule_set_service_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'contentwarehouse.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://contentwarehouse.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.RuleSetServiceGrpcTransport, "grpc"),
+    (transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (transports.RuleSetServiceRestTransport, "rest"),
+])
+def test_rule_set_service_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (RuleSetServiceClient, "grpc"),
+    (RuleSetServiceAsyncClient, "grpc_asyncio"),
+    (RuleSetServiceClient, "rest"),
+])
+def test_rule_set_service_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'contentwarehouse.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://contentwarehouse.googleapis.com'
+        )
+
+
+def test_rule_set_service_client_get_transport_class():
+    transport = RuleSetServiceClient.get_transport_class()
+    available_transports = [
+        transports.RuleSetServiceGrpcTransport,
+        transports.RuleSetServiceRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = RuleSetServiceClient.get_transport_class("grpc")
+    assert transport == transports.RuleSetServiceGrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport, "grpc"),
+    (RuleSetServiceAsyncClient, transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest"),
+])
+@mock.patch.object(RuleSetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuleSetServiceClient))
+@mock.patch.object(RuleSetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuleSetServiceAsyncClient))
+def test_rule_set_service_client_client_options(client_class, transport_class, transport_name):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(RuleSetServiceClient, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(RuleSetServiceClient, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport, "grpc", "true"),
+    (RuleSetServiceAsyncClient, transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport, "grpc", "false"),
+    (RuleSetServiceAsyncClient, transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest", "true"),
+    (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest", "false"),
+])
+@mock.patch.object(RuleSetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuleSetServiceClient))
+@mock.patch.object(RuleSetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuleSetServiceAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_rule_set_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
+                    if use_client_cert_env == "false":
+                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
+                patched.return_value = None
+                client = client_class(transport=transport_name)
+                patched.assert_called_once_with(
+                    credentials=None,
+                    credentials_file=None,
+                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                    scopes=None,
+                    client_cert_source_for_mtls=None,
+                    quota_project_id=None,
+                    client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    always_use_jwt_access=True,
+                    api_audience=None,
+                )
+
+
+@pytest.mark.parametrize("client_class", [
+    RuleSetServiceClient, RuleSetServiceAsyncClient
+])
+@mock.patch.object(RuleSetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RuleSetServiceClient))
+@mock.patch.object(RuleSetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RuleSetServiceAsyncClient))
+def test_rule_set_service_client_get_mtls_endpoint_and_cert_source(client_class):
+    mock_client_cert_source = mock.Mock()
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source == mock_client_cert_source
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        mock_client_cert_source = mock.Mock()
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+            assert api_endpoint == client_class.DEFAULT_ENDPOINT
+            assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
+                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+                assert cert_source == mock_client_cert_source
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+@pytest.mark.parametrize("client_class", [
+    RuleSetServiceClient, RuleSetServiceAsyncClient
+])
+@mock.patch.object(RuleSetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuleSetServiceClient))
+@mock.patch.object(RuleSetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuleSetServiceAsyncClient))
+def test_rule_set_service_client_client_api_endpoint(client_class):
+    mock_client_cert_source = client_cert_source_callback
+    api_override = "foo.com"
+    default_universe = RuleSetServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = RuleSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = RuleSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
+    # use ClientOptions.api_endpoint as the api endpoint regardless.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
+            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
+            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+            assert client.api_endpoint == api_override
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
+    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+    # and ClientOptions.universe_domain="bar.com",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+    options = client_options.ClientOptions()
+    universe_exists = hasattr(options, "universe_domain")
+    if universe_exists:
+        options = client_options.ClientOptions(universe_domain=mock_universe)
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    else:
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
+    assert client.universe_domain == (mock_universe if universe_exists else default_universe)
+
+    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    options = client_options.ClientOptions()
+    if hasattr(options, "universe_domain"):
+        delattr(options, "universe_domain")
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport, "grpc"),
+    (RuleSetServiceAsyncClient, transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest"),
+])
+def test_rule_set_service_client_client_options_scopes(client_class, transport_class, transport_name):
+    # Check the case scopes are provided.
+    options = client_options.ClientOptions(
+        scopes=["1", "2"],
+    )
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=["1", "2"],
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport, "grpc", grpc_helpers),
+    (RuleSetServiceAsyncClient, transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+    (RuleSetServiceClient, transports.RuleSetServiceRestTransport, "rest", None),
+])
+def test_rule_set_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+def test_rule_set_service_client_client_options_from_dict():
+    with mock.patch('google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceGrpcTransport.__init__') as grpc_transport:
+        grpc_transport.return_value = None
+        client = RuleSetServiceClient(
+            client_options={'api_endpoint': 'squid.clam.whelk'}
+        )
+        grpc_transport.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport, "grpc", grpc_helpers),
+    (RuleSetServiceAsyncClient, transports.RuleSetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+])
+def test_rule_set_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "contentwarehouse.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+),
+            scopes=None,
+            default_host="contentwarehouse.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  ruleset_service_request.CreateRuleSetRequest,
+  dict,
+])
+def test_create_rule_set(request_type, transport: str = 'grpc'):
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet(
+            name='name_value',
+            description='description_value',
+            source='source_value',
+        )
+        response = client.create_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.CreateRuleSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, rule_engine.RuleSet)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.source == 'source_value'
+
+
+def test_create_rule_set_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = ruleset_service_request.CreateRuleSetRequest(
+        parent='parent_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_rule_set),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.create_rule_set(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == ruleset_service_request.CreateRuleSetRequest(
+            parent='parent_value',
+        )
+
+def test_create_rule_set_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = RuleSetServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_rule_set in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_rule_set] = mock_rpc
+        request = {}
+        client.create_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.create_rule_set(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_rule_set_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = RuleSetServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_rule_set in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_rule_set] = mock_rpc
+
+        request = {}
+        await client.create_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.create_rule_set(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_rule_set_async(transport: str = 'grpc_asyncio', request_type=ruleset_service_request.CreateRuleSetRequest):
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet(
+            name='name_value',
+            description='description_value',
+            source='source_value',
+        ))
+        response = await client.create_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.CreateRuleSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, rule_engine.RuleSet)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.source == 'source_value'
+
+
+@pytest.mark.asyncio
+async def test_create_rule_set_async_from_dict():
+    await test_create_rule_set_async(request_type=dict)
+
+def test_create_rule_set_field_headers():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.CreateRuleSetRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_rule_set),
+            '__call__') as call:
+        call.return_value = rule_engine.RuleSet()
+        client.create_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_rule_set_field_headers_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.CreateRuleSetRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_rule_set),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet())
+        await client.create_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_rule_set_flattened():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_rule_set(
+            parent='parent_value',
+            rule_set=rule_engine.RuleSet(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].rule_set
+        mock_val = rule_engine.RuleSet(name='name_value')
+        assert arg == mock_val
+
+
+def test_create_rule_set_flattened_error():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_rule_set(
+            ruleset_service_request.CreateRuleSetRequest(),
+            parent='parent_value',
+            rule_set=rule_engine.RuleSet(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_create_rule_set_flattened_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_rule_set(
+            parent='parent_value',
+            rule_set=rule_engine.RuleSet(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].rule_set
+        mock_val = rule_engine.RuleSet(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_rule_set_flattened_error_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_rule_set(
+            ruleset_service_request.CreateRuleSetRequest(),
+            parent='parent_value',
+            rule_set=rule_engine.RuleSet(name='name_value'),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  ruleset_service_request.GetRuleSetRequest,
+  dict,
+])
+def test_get_rule_set(request_type, transport: str = 'grpc'):
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet(
+            name='name_value',
+            description='description_value',
+            source='source_value',
+        )
+        response = client.get_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.GetRuleSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, rule_engine.RuleSet)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.source == 'source_value'
+
+
+def test_get_rule_set_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = ruleset_service_request.GetRuleSetRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_rule_set),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.get_rule_set(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == ruleset_service_request.GetRuleSetRequest(
+            name='name_value',
+        )
+
+def test_get_rule_set_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = RuleSetServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_rule_set in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_rule_set] = mock_rpc
+        request = {}
+        client.get_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_rule_set(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_rule_set_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = RuleSetServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_rule_set in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_rule_set] = mock_rpc
+
+        request = {}
+        await client.get_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_rule_set(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_rule_set_async(transport: str = 'grpc_asyncio', request_type=ruleset_service_request.GetRuleSetRequest):
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet(
+            name='name_value',
+            description='description_value',
+            source='source_value',
+        ))
+        response = await client.get_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.GetRuleSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, rule_engine.RuleSet)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.source == 'source_value'
+
+
+@pytest.mark.asyncio
+async def test_get_rule_set_async_from_dict():
+    await test_get_rule_set_async(request_type=dict)
+
+def test_get_rule_set_field_headers():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.GetRuleSetRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_rule_set),
+            '__call__') as call:
+        call.return_value = rule_engine.RuleSet()
+        client.get_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_rule_set_field_headers_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.GetRuleSetRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_rule_set),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet())
+        await client.get_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_rule_set_flattened():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_rule_set(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_rule_set_flattened_error():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_rule_set(
+            ruleset_service_request.GetRuleSetRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_rule_set_flattened_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_rule_set(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_rule_set_flattened_error_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_rule_set(
+            ruleset_service_request.GetRuleSetRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  ruleset_service_request.UpdateRuleSetRequest,
+  dict,
+])
+def test_update_rule_set(request_type, transport: str = 'grpc'):
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet(
+            name='name_value',
+            description='description_value',
+            source='source_value',
+        )
+        response = client.update_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.UpdateRuleSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, rule_engine.RuleSet)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.source == 'source_value'
+
+
+def test_update_rule_set_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = ruleset_service_request.UpdateRuleSetRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_rule_set),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.update_rule_set(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == ruleset_service_request.UpdateRuleSetRequest(
+            name='name_value',
+        )
+
+def test_update_rule_set_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = RuleSetServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_rule_set in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_rule_set] = mock_rpc
+        request = {}
+        client.update_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.update_rule_set(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_rule_set_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = RuleSetServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_rule_set in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_rule_set] = mock_rpc
+
+        request = {}
+        await client.update_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.update_rule_set(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_rule_set_async(transport: str = 'grpc_asyncio', request_type=ruleset_service_request.UpdateRuleSetRequest):
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet(
+            name='name_value',
+            description='description_value',
+            source='source_value',
+        ))
+        response = await client.update_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.UpdateRuleSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, rule_engine.RuleSet)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.source == 'source_value'
+
+
+@pytest.mark.asyncio
+async def test_update_rule_set_async_from_dict():
+    await test_update_rule_set_async(request_type=dict)
+
+def test_update_rule_set_field_headers():
+    """Routing-header test: request.name must appear in x-goog-request-params."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.UpdateRuleSetRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_rule_set),
+            '__call__') as call:
+        call.return_value = rule_engine.RuleSet()
+        client.update_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_rule_set_field_headers_async():
+    """Async routing-header test: request.name must appear in x-goog-request-params."""
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.UpdateRuleSetRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_rule_set),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet())
+        await client.update_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_rule_set_flattened():
+    """Flattened kwargs must be copied into the corresponding request fields."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_rule_set(
+            name='name_value',
+            rule_set=rule_engine.RuleSet(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+        arg = args[0].rule_set
+        mock_val = rule_engine.RuleSet(name='name_value')
+        assert arg == mock_val
+
+
+def test_update_rule_set_flattened_error():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_rule_set(
+            ruleset_service_request.UpdateRuleSetRequest(),
+            name='name_value',
+            rule_set=rule_engine.RuleSet(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_update_rule_set_flattened_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = rule_engine.RuleSet()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_rule_set(
+            name='name_value',
+            rule_set=rule_engine.RuleSet(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+        arg = args[0].rule_set
+        mock_val = rule_engine.RuleSet(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_rule_set_flattened_error_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_rule_set(
+            ruleset_service_request.UpdateRuleSetRequest(),
+            name='name_value',
+            rule_set=rule_engine.RuleSet(name='name_value'),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  ruleset_service_request.DeleteRuleSetRequest,
+  dict,
+])
+def test_delete_rule_set(request_type, transport: str = 'grpc'):
+    """End-to-end delete_rule_set against a mocked gRPC stub (empty request)."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.DeleteRuleSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_rule_set_non_empty_request_with_auto_populated_field():
+    """A non-empty request must reach the stub with its fields intact."""
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = ruleset_service_request.DeleteRuleSetRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_rule_set),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.delete_rule_set(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == ruleset_service_request.DeleteRuleSetRequest(
+            name='name_value',
+        )
+
+def test_delete_rule_set_use_cached_wrapped_rpc():
+    """delete_rule_set must reuse the wrapped RPC cached at client creation."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = RuleSetServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_rule_set in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_rule_set] = mock_rpc
+        request = {}
+        client.delete_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_rule_set(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_rule_set_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    """Async: delete_rule_set must reuse the wrapped RPC cached at creation."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = RuleSetServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.delete_rule_set in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.delete_rule_set] = mock_rpc
+
+        request = {}
+        await client.delete_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.delete_rule_set(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_delete_rule_set_async(transport: str = 'grpc_asyncio', request_type=ruleset_service_request.DeleteRuleSetRequest):
+    """End-to-end async delete_rule_set against a mocked gRPC stub."""
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.DeleteRuleSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_rule_set_async_from_dict():
+    await test_delete_rule_set_async(request_type=dict)
+
+def test_delete_rule_set_field_headers():
+    """Routing-header test: request.name must appear in x-goog-request-params."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.DeleteRuleSetRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_rule_set),
+            '__call__') as call:
+        call.return_value = None
+        client.delete_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_rule_set_field_headers_async():
+    """Async routing-header test: request.name must appear in x-goog-request-params."""
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.DeleteRuleSetRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_rule_set),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_rule_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_rule_set_flattened():
+    """Flattened ``name`` kwarg must be copied into the request message."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_rule_set(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_rule_set_flattened_error():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_rule_set(
+            ruleset_service_request.DeleteRuleSetRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_rule_set_flattened_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_rule_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_rule_set(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_rule_set_flattened_error_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_rule_set(
+            ruleset_service_request.DeleteRuleSetRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  ruleset_service_request.ListRuleSetsRequest,
+  dict,
+])
+def test_list_rule_sets(request_type, transport: str = 'grpc'):
+    """list_rule_sets wraps the stubbed response in a ListRuleSetsPager."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = ruleset_service_request.ListRuleSetsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_rule_sets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.ListRuleSetsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListRuleSetsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_rule_sets_non_empty_request_with_auto_populated_field():
+    """A non-empty request must reach the stub with its fields intact."""
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = ruleset_service_request.ListRuleSetsRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.list_rule_sets(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == ruleset_service_request.ListRuleSetsRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+        )
+
+def test_list_rule_sets_use_cached_wrapped_rpc():
+    """list_rule_sets must reuse the wrapped RPC cached at client creation."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = RuleSetServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_rule_sets in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_rule_sets] = mock_rpc
+        request = {}
+        client.list_rule_sets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_rule_sets(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_rule_sets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    """Async: list_rule_sets must reuse the wrapped RPC cached at creation."""
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = RuleSetServiceAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_rule_sets in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_rule_sets] = mock_rpc
+
+        request = {}
+        await client.list_rule_sets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_rule_sets(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_rule_sets_async(transport: str = 'grpc_asyncio', request_type=ruleset_service_request.ListRuleSetsRequest):
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(ruleset_service_request.ListRuleSetsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_rule_sets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = ruleset_service_request.ListRuleSetsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListRuleSetsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_rule_sets_async_from_dict():
+    await test_list_rule_sets_async(request_type=dict)
+
+def test_list_rule_sets_field_headers():
+    """Routing-header test: request.parent must appear in x-goog-request-params."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.ListRuleSetsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        call.return_value = ruleset_service_request.ListRuleSetsResponse()
+        client.list_rule_sets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_rule_sets_field_headers_async():
+    """Async routing-header test: request.parent in x-goog-request-params."""
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = ruleset_service_request.ListRuleSetsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ruleset_service_request.ListRuleSetsResponse())
+        await client.list_rule_sets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_rule_sets_flattened():
+    """Flattened ``parent`` kwarg must be copied into the request message."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = ruleset_service_request.ListRuleSetsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_rule_sets(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_rule_sets_flattened_error():
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_rule_sets(
+            ruleset_service_request.ListRuleSetsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_rule_sets_flattened_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = ruleset_service_request.ListRuleSetsResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ruleset_service_request.ListRuleSetsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_rule_sets(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_rule_sets_flattened_error_async():
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_rule_sets(
+            ruleset_service_request.ListRuleSetsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_rule_sets_pager(transport_name: str = "grpc"):
+    """Pager iterates all items across stubbed pages; retry/timeout/metadata
+    supplied to the call are forwarded to the pager unchanged."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                ],
+                next_page_token='abc',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[],
+                next_page_token='def',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                ],
+                next_page_token='ghi',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_rule_sets(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        # 3 + 0 + 1 + 2 items across the four stubbed pages.
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, rule_engine.RuleSet)
+                   for i in results)
+def test_list_rule_sets_pages(transport_name: str = "grpc"):
+    """The pages iterator exposes each raw page's next_page_token in order."""
+    client = RuleSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                ],
+                next_page_token='abc',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[],
+                next_page_token='def',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                ],
+                next_page_token='ghi',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_rule_sets(request={}).pages)
+        # The final page has no next_page_token, hence the trailing ''.
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_rule_sets_async_pager():
+    """Async pager yields every item across the stubbed pages."""
+    client = RuleSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_rule_sets),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                ],
+                next_page_token='abc',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[],
+                next_page_token='def',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                ],
+                next_page_token='ghi',
+            ),
+            ruleset_service_request.ListRuleSetsResponse(
+                rule_sets=[
+                    rule_engine.RuleSet(),
+                    rule_engine.RuleSet(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_rule_sets(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        # 3 + 0 + 1 + 2 items across the four stubbed pages.
+        assert len(responses) == 6
+        assert all(isinstance(i, rule_engine.RuleSet)
+                for i in responses)
+
+
@pytest.mark.asyncio
async def test_list_rule_sets_async_pages():
    """The async pager's ``pages`` view should expose each raw page token."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_rule_sets),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages. The trailing RuntimeError
        # ensures the pager never fetches more pages than were staged.
        call.side_effect = (
            ruleset_service_request.ListRuleSetsResponse(
                rule_sets=[
                    rule_engine.RuleSet(),
                    rule_engine.RuleSet(),
                    rule_engine.RuleSet(),
                ],
                next_page_token='abc',
            ),
            ruleset_service_request.ListRuleSetsResponse(
                rule_sets=[],
                next_page_token='def',
            ),
            ruleset_service_request.ListRuleSetsResponse(
                rule_sets=[
                    rule_engine.RuleSet(),
                ],
                next_page_token='ghi',
            ),
            ruleset_service_request.ListRuleSetsResponse(
                rule_sets=[
                    rule_engine.RuleSet(),
                    rule_engine.RuleSet(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
        async for page_ in ( # pragma: no branch
            await client.list_rule_sets(request={})
        ).pages:
            pages.append(page_)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
+
def test_create_rule_set_rest_use_cached_wrapped_rpc():
    """CreateRuleSet must reuse the wrapped RPC cached at client creation."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_method_mock:
        client = RuleSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Constructing the client should wrap every RPC up front.
        assert wrap_method_mock.call_count > 0
        wrap_method_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        transport = client._transport
        assert transport.create_rule_set in transport._wrapped_methods

        # Swap the cached wrapper for a stub so invocations can be counted.
        fake_rpc = mock.Mock()
        fake_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.create_rule_set] = fake_rpc

        empty_request = {}
        client.create_rule_set(empty_request)

        # First call hits the stub once.
        assert fake_rpc.call_count == 1

        client.create_rule_set(empty_request)

        # Second call must come from the cache: no new wrapper was built.
        assert wrap_method_mock.call_count == 0
        assert fake_rpc.call_count == 2
+
+
def test_create_rule_set_rest_required_fields(request_type=ruleset_service_request.CreateRuleSetRequest):
    """Exercise REST required-field handling for CreateRuleSet.

    Verifies that default-valued required fields are dropped from the JSON
    request, restored by ``_get_unset_required_fields``, that non-default
    values are left untouched, and that the expected query params are sent.
    """
    transport_class = transports.RuleSetServiceRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_rule_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_rule_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = rule_engine.RuleSet()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = rule_engine.RuleSet.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.create_rule_set(request)

            # Only the implicit serialization params should be on the wire.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_create_rule_set_rest_unset_required_fields():
    """Check the required fields reported as unset for CreateRuleSet."""
    # NOTE: the credentials class is passed uncalled here, mirroring the
    # generated fixture style elsewhere in this file.
    rest_transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials)
    missing = rest_transport.create_rule_set._get_unset_required_fields({})
    assert set(missing) == (set(()) & set(("parent", "ruleSet", )))
+
+
def test_create_rule_set_rest_flattened():
    """Flattened args should be folded into a CreateRuleSet REST request."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the HTTP session and fake a successful serialized response.
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        expected = rule_engine.RuleSet()
        fake_response = Response()
        fake_response.status_code = 200
        # Serialize the protobuf payload the way the transport expects it.
        fake_response._content = json_format.MessageToJson(
            rule_engine.RuleSet.pb(expected)
        ).encode('UTF-8')
        http_call.return_value = fake_response

        # Truthy flattened fields, with path params that satisfy the http rule
        # overriding the placeholder parent.
        flattened_kwargs = dict(
            parent='parent_value',
            rule_set=rule_engine.RuleSet(name='name_value'),
        )
        flattened_kwargs.update({'parent': 'projects/sample1/locations/sample2'})

        client.create_rule_set(**flattened_kwargs)

        # Exactly one HTTP request, routed to the expected URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/ruleSets" % client.transport._host, call_args[1])
+
+
def test_create_rule_set_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    rest_client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both a request message and flattened arguments is rejected.
    with pytest.raises(ValueError):
        rest_client.create_rule_set(
            ruleset_service_request.CreateRuleSetRequest(),
            parent='parent_value',
            rule_set=rule_engine.RuleSet(name='name_value'),
        )
+
+
def test_get_rule_set_rest_use_cached_wrapped_rpc():
    """GetRuleSet must reuse the wrapped RPC cached at client creation."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_method_mock:
        client = RuleSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Constructing the client should wrap every RPC up front.
        assert wrap_method_mock.call_count > 0
        wrap_method_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        transport = client._transport
        assert transport.get_rule_set in transport._wrapped_methods

        # Swap the cached wrapper for a stub so invocations can be counted.
        fake_rpc = mock.Mock()
        fake_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.get_rule_set] = fake_rpc

        empty_request = {}
        client.get_rule_set(empty_request)

        # First call hits the stub once.
        assert fake_rpc.call_count == 1

        client.get_rule_set(empty_request)

        # Second call must come from the cache: no new wrapper was built.
        assert wrap_method_mock.call_count == 0
        assert fake_rpc.call_count == 2
+
+
def test_get_rule_set_rest_required_fields(request_type=ruleset_service_request.GetRuleSetRequest):
    """Exercise REST required-field handling for GetRuleSet.

    Verifies that default-valued required fields are dropped from the JSON
    request, restored by ``_get_unset_required_fields``, that non-default
    values are left untouched, and that the expected query params are sent.
    """
    transport_class = transports.RuleSetServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_rule_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = rule_engine.RuleSet()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = rule_engine.RuleSet.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get_rule_set(request)

            # Only the implicit serialization params should be on the wire.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_get_rule_set_rest_unset_required_fields():
    """Check the required fields reported as unset for GetRuleSet."""
    # NOTE: the credentials class is passed uncalled here, mirroring the
    # generated fixture style elsewhere in this file.
    rest_transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials)
    missing = rest_transport.get_rule_set._get_unset_required_fields({})
    assert set(missing) == (set(()) & set(("name", )))
+
+
def test_get_rule_set_rest_flattened():
    """Flattened args should be folded into a GetRuleSet REST request."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the HTTP session and fake a successful serialized response.
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        expected = rule_engine.RuleSet()
        fake_response = Response()
        fake_response.status_code = 200
        # Serialize the protobuf payload the way the transport expects it.
        fake_response._content = json_format.MessageToJson(
            rule_engine.RuleSet.pb(expected)
        ).encode('UTF-8')
        http_call.return_value = fake_response

        # Truthy flattened fields, with path params that satisfy the http rule
        # overriding the placeholder name.
        flattened_kwargs = dict(
            name='name_value',
        )
        flattened_kwargs.update({'name': 'projects/sample1/locations/sample2/ruleSets/sample3'})

        client.get_rule_set(**flattened_kwargs)

        # Exactly one HTTP request, routed to the expected URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate("%s/v1/{name=projects/*/locations/*/ruleSets/*}" % client.transport._host, call_args[1])
+
+
def test_get_rule_set_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    rest_client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both a request message and flattened arguments is rejected.
    with pytest.raises(ValueError):
        rest_client.get_rule_set(
            ruleset_service_request.GetRuleSetRequest(),
            name='name_value',
        )
+
+
def test_update_rule_set_rest_use_cached_wrapped_rpc():
    """UpdateRuleSet must reuse the wrapped RPC cached at client creation."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_method_mock:
        client = RuleSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Constructing the client should wrap every RPC up front.
        assert wrap_method_mock.call_count > 0
        wrap_method_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        transport = client._transport
        assert transport.update_rule_set in transport._wrapped_methods

        # Swap the cached wrapper for a stub so invocations can be counted.
        fake_rpc = mock.Mock()
        fake_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.update_rule_set] = fake_rpc

        empty_request = {}
        client.update_rule_set(empty_request)

        # First call hits the stub once.
        assert fake_rpc.call_count == 1

        client.update_rule_set(empty_request)

        # Second call must come from the cache: no new wrapper was built.
        assert wrap_method_mock.call_count == 0
        assert fake_rpc.call_count == 2
+
+
def test_update_rule_set_rest_required_fields(request_type=ruleset_service_request.UpdateRuleSetRequest):
    """Exercise REST required-field handling for UpdateRuleSet.

    Verifies that default-valued required fields are dropped from the JSON
    request, restored by ``_get_unset_required_fields``, that non-default
    values are left untouched, and that the expected query params are sent.
    """
    transport_class = transports.RuleSetServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_rule_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_rule_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = rule_engine.RuleSet()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "patch",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = rule_engine.RuleSet.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.update_rule_set(request)

            # Only the implicit serialization params should be on the wire.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_update_rule_set_rest_unset_required_fields():
    """Check the required fields reported as unset for UpdateRuleSet."""
    # NOTE: the credentials class is passed uncalled here, mirroring the
    # generated fixture style elsewhere in this file.
    rest_transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials)
    missing = rest_transport.update_rule_set._get_unset_required_fields({})
    assert set(missing) == (set(()) & set(("name", "ruleSet", )))
+
+
def test_update_rule_set_rest_flattened():
    """Flattened args should be folded into an UpdateRuleSet REST request."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the HTTP session and fake a successful serialized response.
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        expected = rule_engine.RuleSet()
        fake_response = Response()
        fake_response.status_code = 200
        # Serialize the protobuf payload the way the transport expects it.
        fake_response._content = json_format.MessageToJson(
            rule_engine.RuleSet.pb(expected)
        ).encode('UTF-8')
        http_call.return_value = fake_response

        # Truthy flattened fields, with path params that satisfy the http rule
        # overriding the placeholder name.
        flattened_kwargs = dict(
            name='name_value',
            rule_set=rule_engine.RuleSet(name='name_value'),
        )
        flattened_kwargs.update({'name': 'projects/sample1/locations/sample2/ruleSets/sample3'})

        client.update_rule_set(**flattened_kwargs)

        # Exactly one HTTP request, routed to the expected URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate("%s/v1/{name=projects/*/locations/*/ruleSets/*}" % client.transport._host, call_args[1])
+
+
def test_update_rule_set_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    rest_client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both a request message and flattened arguments is rejected.
    with pytest.raises(ValueError):
        rest_client.update_rule_set(
            ruleset_service_request.UpdateRuleSetRequest(),
            name='name_value',
            rule_set=rule_engine.RuleSet(name='name_value'),
        )
+
+
def test_delete_rule_set_rest_use_cached_wrapped_rpc():
    """DeleteRuleSet must reuse the wrapped RPC cached at client creation."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_method_mock:
        client = RuleSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Constructing the client should wrap every RPC up front.
        assert wrap_method_mock.call_count > 0
        wrap_method_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        transport = client._transport
        assert transport.delete_rule_set in transport._wrapped_methods

        # Swap the cached wrapper for a stub so invocations can be counted.
        fake_rpc = mock.Mock()
        fake_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.delete_rule_set] = fake_rpc

        empty_request = {}
        client.delete_rule_set(empty_request)

        # First call hits the stub once.
        assert fake_rpc.call_count == 1

        client.delete_rule_set(empty_request)

        # Second call must come from the cache: no new wrapper was built.
        assert wrap_method_mock.call_count == 0
        assert fake_rpc.call_count == 2
+
+
def test_delete_rule_set_rest_required_fields(request_type=ruleset_service_request.DeleteRuleSetRequest):
    """Exercise REST required-field handling for DeleteRuleSet.

    Verifies that default-valued required fields are dropped from the JSON
    request, restored by ``_get_unset_required_fields``, that non-default
    values are left untouched, and that the expected query params are sent.
    DeleteRuleSet returns no payload, so the fake HTTP body is empty.
    """
    transport_class = transports.RuleSetServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_rule_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_rule_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = None
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = ''

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete_rule_set(request)

            # Only the implicit serialization params should be on the wire.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_delete_rule_set_rest_unset_required_fields():
    """Check the required fields reported as unset for DeleteRuleSet."""
    # NOTE: the credentials class is passed uncalled here, mirroring the
    # generated fixture style elsewhere in this file.
    rest_transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials)
    missing = rest_transport.delete_rule_set._get_unset_required_fields({})
    assert set(missing) == (set(()) & set(("name", )))
+
+
def test_delete_rule_set_rest_flattened():
    """Flattened args should be folded into a DeleteRuleSet REST request."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the HTTP session and fake an empty-bodied success response
    # (DeleteRuleSet returns no payload).
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = ''.encode('UTF-8')
        http_call.return_value = fake_response

        # Truthy flattened fields, with path params that satisfy the http rule
        # overriding the placeholder name.
        flattened_kwargs = dict(
            name='name_value',
        )
        flattened_kwargs.update({'name': 'projects/sample1/locations/sample2/ruleSets/sample3'})

        client.delete_rule_set(**flattened_kwargs)

        # Exactly one HTTP request, routed to the expected URI template.
        assert len(http_call.mock_calls) == 1
        _, call_args, _ = http_call.mock_calls[0]
        assert path_template.validate("%s/v1/{name=projects/*/locations/*/ruleSets/*}" % client.transport._host, call_args[1])
+
+
def test_delete_rule_set_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    rest_client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both a request message and flattened arguments is rejected.
    with pytest.raises(ValueError):
        rest_client.delete_rule_set(
            ruleset_service_request.DeleteRuleSetRequest(),
            name='name_value',
        )
+
+
def test_list_rule_sets_rest_use_cached_wrapped_rpc():
    """ListRuleSets must reuse the wrapped RPC cached at client creation."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_method_mock:
        client = RuleSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Constructing the client should wrap every RPC up front.
        assert wrap_method_mock.call_count > 0
        wrap_method_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        transport = client._transport
        assert transport.list_rule_sets in transport._wrapped_methods

        # Swap the cached wrapper for a stub so invocations can be counted.
        fake_rpc = mock.Mock()
        fake_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.list_rule_sets] = fake_rpc

        empty_request = {}
        client.list_rule_sets(empty_request)

        # First call hits the stub once.
        assert fake_rpc.call_count == 1

        client.list_rule_sets(empty_request)

        # Second call must come from the cache: no new wrapper was built.
        assert wrap_method_mock.call_count == 0
        assert fake_rpc.call_count == 2
+
+
def test_list_rule_sets_rest_required_fields(request_type=ruleset_service_request.ListRuleSetsRequest):
    """End-to-end check of required-field handling for the REST list_rule_sets RPC.

    First verifies that default-valued required fields are dropped from the
    JSON request and re-reported as unset, then that non-default values are
    preserved, and finally that the transport sends the expected query params.
    """
    transport_class = transports.RuleSetServiceRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_rule_sets._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_rule_sets._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("page_size", "page_token", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = ruleset_service_request.ListRuleSetsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = ruleset_service_request.ListRuleSetsResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_rule_sets(request)

            # Only the implicit '$alt' parameter should appear; all request
            # fields were routed through the (mocked) transcoded query_params.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_list_rule_sets_rest_unset_required_fields():
    """Check the unset-required-fields computation for list_rule_sets.

    The unset fields, intersected with the query-string candidates, must match
    the expected camelCase field names for an empty request.
    """
    # Fix: pass a credentials *instance*, not the class object, matching every
    # other transport construction in this file.
    transport = transports.RuleSetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())

    unset_fields = transport.list_rule_sets._get_unset_required_fields({})
    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
def test_list_rule_sets_rest_flattened():
    """Flattened arguments must be folded into the request and hit the right URL.

    Mocks the HTTP session, invokes the method with flattened fields only, and
    validates the issued URI against the service's path template.
    """
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = ruleset_service_request.ListRuleSetsResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'parent': 'projects/sample1/locations/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            parent='parent_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        # Convert return value to protobuf type
        return_value = ruleset_service_request.ListRuleSetsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list_rule_sets(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/ruleSets" % client.transport._host, args[1])
+
+
def test_list_rule_sets_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # A request message and flattened arguments are mutually exclusive, so the
    # client refuses the call before any RPC is attempted.
    request = ruleset_service_request.ListRuleSetsRequest()
    with pytest.raises(ValueError):
        client.list_rule_sets(request, parent='parent_value')
+
+
def test_list_rule_sets_rest_pager(transport: str = 'rest'):
    """Exercise REST pagination: the pager must walk all pages and tokens.

    Feeds four serialized pages (twice, once per client call) through a mocked
    HTTP session and checks both flattened iteration and per-page tokens.
    """
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # Set the response as a series of pages
        response = (
            ruleset_service_request.ListRuleSetsResponse(
                rule_sets=[
                    rule_engine.RuleSet(),
                    rule_engine.RuleSet(),
                    rule_engine.RuleSet(),
                ],
                next_page_token='abc',
            ),
            ruleset_service_request.ListRuleSetsResponse(
                rule_sets=[],
                next_page_token='def',
            ),
            ruleset_service_request.ListRuleSetsResponse(
                rule_sets=[
                    rule_engine.RuleSet(),
                ],
                next_page_token='ghi',
            ),
            ruleset_service_request.ListRuleSetsResponse(
                rule_sets=[
                    rule_engine.RuleSet(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(ruleset_service_request.ListRuleSetsResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'parent': 'projects/sample1/locations/sample2'}

        pager = client.list_rule_sets(request=sample_request)

        # 3 + 0 + 1 + 2 rule sets across the four pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, rule_engine.RuleSet)
                for i in results)

        pages = list(client.list_rule_sets(request=sample_request).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
+
def test_credentials_transport_error():
    """A pre-built transport is mutually exclusive with other credential options."""
    # A transport instance already carries its own credentials, so supplying
    # any of these alongside it must be rejected with ValueError.
    conflicting_kwargs = (
        dict(credentials=ga_credentials.AnonymousCredentials()),
        dict(client_options={"credentials_file": "credentials.json"}),
        dict(client_options={"scopes": ["1", "2"]}),
    )
    for extra in conflicting_kwargs:
        transport = transports.RuleSetServiceGrpcTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        with pytest.raises(ValueError):
            RuleSetServiceClient(transport=transport, **extra)

    # An api_key together with a transport instance is also rejected.
    transport = transports.RuleSetServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    api_key_options = client_options.ClientOptions()
    api_key_options.api_key = "api_key"
    with pytest.raises(ValueError):
        RuleSetServiceClient(client_options=api_key_options, transport=transport)

    # An api_key together with explicit credentials is rejected as well.
    api_key_options = client_options.ClientOptions()
    api_key_options.api_key = "api_key"
    with pytest.raises(ValueError):
        RuleSetServiceClient(
            client_options=api_key_options,
            credentials=ga_credentials.AnonymousCredentials(),
        )
+
+
def test_transport_instance():
    """A client constructed with a custom transport exposes that exact transport."""
    custom_transport = transports.RuleSetServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert RuleSetServiceClient(transport=custom_transport).transport is custom_transport
+
def test_transport_get_channel():
    """Both the sync and async gRPC transports expose a usable channel."""
    for transport_cls in (
        transports.RuleSetServiceGrpcTransport,
        transports.RuleSetServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert transport.grpc_channel
+
@pytest.mark.parametrize("transport_class", [
    transports.RuleSetServiceGrpcTransport,
    transports.RuleSetServiceGrpcAsyncIOTransport,
    transports.RuleSetServiceRestTransport,
])
def test_transport_adc(transport_class):
    """Transports fall back to application default credentials when none are given."""
    with mock.patch.object(google.auth, 'default') as default_creds:
        default_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        # Constructing without explicit credentials must trigger exactly one
        # ADC lookup.
        default_creds.assert_called_once()
+
def test_transport_kind_grpc():
    """The transport resolved for "grpc" reports its kind as "grpc"."""
    transport_cls = RuleSetServiceClient.get_transport_class("grpc")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "grpc"
+
+
def test_initialize_client_w_grpc():
    """A client can be constructed over the grpc transport."""
    assert RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    ) is not None
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
def test_create_rule_set_empty_call_grpc():
    """create_rule_set(request=None) sends a default CreateRuleSetRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.create_rule_set), '__call__') as stub_call:
        stub_call.return_value = rule_engine.RuleSet()
        client.create_rule_set(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.CreateRuleSetRequest()
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
def test_get_rule_set_empty_call_grpc():
    """get_rule_set(request=None) sends a default GetRuleSetRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.get_rule_set), '__call__') as stub_call:
        stub_call.return_value = rule_engine.RuleSet()
        client.get_rule_set(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.GetRuleSetRequest()
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
def test_update_rule_set_empty_call_grpc():
    """update_rule_set(request=None) sends a default UpdateRuleSetRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.update_rule_set), '__call__') as stub_call:
        stub_call.return_value = rule_engine.RuleSet()
        client.update_rule_set(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.UpdateRuleSetRequest()
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
def test_delete_rule_set_empty_call_grpc():
    """delete_rule_set(request=None) sends a default DeleteRuleSetRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.delete_rule_set), '__call__') as stub_call:
        stub_call.return_value = None  # DeleteRuleSet returns Empty.
        client.delete_rule_set(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.DeleteRuleSetRequest()
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
def test_list_rule_sets_empty_call_grpc():
    """list_rule_sets(request=None) sends a default ListRuleSetsRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.list_rule_sets), '__call__') as stub_call:
        stub_call.return_value = ruleset_service_request.ListRuleSetsResponse()
        client.list_rule_sets(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.ListRuleSetsRequest()
+
+
def test_transport_kind_grpc_asyncio():
    """The transport resolved for "grpc_asyncio" reports its kind accordingly."""
    transport_cls = RuleSetServiceAsyncClient.get_transport_class("grpc_asyncio")
    transport = transport_cls(credentials=async_anonymous_credentials())
    assert transport.kind == "grpc_asyncio"
+
+
def test_initialize_client_w_grpc_asyncio():
    """An async client can be constructed over the grpc_asyncio transport."""
    assert RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    ) is not None
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
@pytest.mark.asyncio
async def test_create_rule_set_empty_call_grpc_asyncio():
    """Async create_rule_set(request=None) sends a default CreateRuleSetRequest."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.create_rule_set), '__call__') as stub_call:
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet(
            name='name_value',
            description='description_value',
            source='source_value',
        ))
        await client.create_rule_set(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.CreateRuleSetRequest()
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
@pytest.mark.asyncio
async def test_get_rule_set_empty_call_grpc_asyncio():
    """Async get_rule_set(request=None) sends a default GetRuleSetRequest."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.get_rule_set), '__call__') as stub_call:
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet(
            name='name_value',
            description='description_value',
            source='source_value',
        ))
        await client.get_rule_set(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.GetRuleSetRequest()
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
@pytest.mark.asyncio
async def test_update_rule_set_empty_call_grpc_asyncio():
    """Async update_rule_set(request=None) sends a default UpdateRuleSetRequest."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.update_rule_set), '__call__') as stub_call:
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(rule_engine.RuleSet(
            name='name_value',
            description='description_value',
            source='source_value',
        ))
        await client.update_rule_set(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.UpdateRuleSetRequest()
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
@pytest.mark.asyncio
async def test_delete_rule_set_empty_call_grpc_asyncio():
    """Async delete_rule_set(request=None) sends a default DeleteRuleSetRequest."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.delete_rule_set), '__call__') as stub_call:
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)  # Empty response.
        await client.delete_rule_set(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.DeleteRuleSetRequest()
+
+
# Coverage failsafe: a completely empty call (request=None, no flattened
# fields) must still send a default-constructed request message.
@pytest.mark.asyncio
async def test_list_rule_sets_empty_call_grpc_asyncio():
    """Async list_rule_sets(request=None) sends a default ListRuleSetsRequest."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Stub out the underlying gRPC callable and invoke with no request.
    with mock.patch.object(
            type(client.transport.list_rule_sets), '__call__') as stub_call:
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ruleset_service_request.ListRuleSetsResponse(
            next_page_token='next_page_token_value',
        ))
        await client.list_rule_sets(request=None)

        stub_call.assert_called()
        _, call_args, _ = stub_call.mock_calls[0]
        assert call_args[0] == ruleset_service_request.ListRuleSetsRequest()
+
+
def test_transport_kind_rest():
    """The transport resolved for "rest" reports its kind as "rest"."""
    transport_cls = RuleSetServiceClient.get_transport_class("rest")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "rest"
+
+
def test_create_rule_set_rest_bad_request(request_type=ruleset_service_request.CreateRuleSetRequest):
    """An HTTP 400 from the REST endpoint surfaces as core_exceptions.BadRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    # (Removed an unused local `json_return_value` left over from generation.)
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.create_rule_set(request)
+
+
@pytest.mark.parametrize("request_type", [
  ruleset_service_request.CreateRuleSetRequest,
  dict,
])
def test_create_rule_set_rest_call_success(request_type):
    """A successful REST create_rule_set call deserializes the RuleSet response.

    Before issuing the call, prunes sample-request subfields that are absent
    from the runtime protobuf definitions so version skew between generator
    and runtime dependency does not break the test.
    """
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request_init["rule_set"] = {'name': 'name_value', 'description': 'description_value', 'source': 'source_value', 'rules': [{'description': 'description_value', 'rule_id': 'rule_id_value', 'trigger_type': 1, 'condition': 'condition_value', 'actions': [{'action_id': 'action_id_value', 'access_control': {'operation_type': 1, 'policy': {'version': 774, 'bindings': [{'role': 'role_value', 'members': ['members_value1', 'members_value2'], 'condition': {'expression': 'expression_value', 'title': 'title_value', 'description': 'description_value', 'location': 'location_value'}}], 'audit_configs': [{'service': 'service_value', 'audit_log_configs': [{'log_type': 1, 'exempted_members': ['exempted_members_value1', 'exempted_members_value2']}]}], 'etag': b'etag_blob'}}, 'data_validation': {'conditions': {}}, 'data_update': {'entries': {}}, 'add_to_folder': {'folders': ['folders_value1', 'folders_value2']}, 'publish_to_pub_sub': {'topic_id': 'topic_id_value', 'messages': ['messages_value1', 'messages_value2']}, 'remove_from_folder_action': {'condition': 'condition_value', 'folder': 'folder_value'}, 'delete_document_action': {'enable_hard_delete': True}}]}]}
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = ruleset_service_request.CreateRuleSetRequest.meta.fields["rule_set"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else: # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    # Pairs of (field, nested_field) names that exist in the runtime schema.
    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["rule_set"].items(): # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["rule_set"][field])):
                    del request_init["rule_set"][field][i][subfield]
            else:
                del request_init["rule_set"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = rule_engine.RuleSet(
              name='name_value',
              description='description_value',
              source='source_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = rule_engine.RuleSet.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.create_rule_set(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, rule_engine.RuleSet)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.source == 'source_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_rule_set_rest_interceptors(null_interceptor):
    """The REST interceptor's pre/post hooks fire exactly once around create_rule_set.

    Runs with and without an interceptor instance; the patched hook methods
    must be invoked either way.
    """
    transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RuleSetServiceRestInterceptor(),
        )
    client = RuleSetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "post_create_rule_set") as post, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "pre_create_rule_set") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = ruleset_service_request.CreateRuleSetRequest.pb(ruleset_service_request.CreateRuleSetRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP response carrying a serialized RuleSet.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = rule_engine.RuleSet.to_json(rule_engine.RuleSet())
        req.return_value.content = return_value

        request = ruleset_service_request.CreateRuleSetRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = rule_engine.RuleSet()

        client.create_rule_set(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_rule_set_rest_bad_request(request_type=ruleset_service_request.GetRuleSetRequest):
    """An HTTP 400 from the REST endpoint surfaces as core_exceptions.BadRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/ruleSets/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    # (Removed an unused local `json_return_value` left over from generation.)
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.get_rule_set(request)
+
+
@pytest.mark.parametrize("request_type", [
  ruleset_service_request.GetRuleSetRequest,
  dict,
])
def test_get_rule_set_rest_call_success(request_type):
    """A successful REST get_rule_set call deserializes the RuleSet response."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A resource name that satisfies the URL transcoding rules for this method.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/ruleSets/sample3'})

    # Mock the HTTP session and fake a 200 response carrying a RuleSet.
    with mock.patch.object(type(client.transport._session), 'request') as http_request:
        expected = rule_engine.RuleSet(
              name='name_value',
              description='description_value',
              source='source_value',
        )

        fake_response = mock.Mock()
        fake_response.status_code = 200
        # Serialize the message exactly as the server would.
        serialized = json_format.MessageToJson(rule_engine.RuleSet.pb(expected))
        fake_response.content = serialized.encode('UTF-8')
        http_request.return_value = fake_response

        response = client.get_rule_set(request)

    # The deserialized response must round-trip all fields.
    assert isinstance(response, rule_engine.RuleSet)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.source == 'source_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rule_set_rest_interceptors(null_interceptor):
    """Verify the pre/post REST interceptor hooks fire exactly once around get_rule_set.

    Runs both with and without an interceptor instance attached to the transport.
    """
    transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RuleSetServiceRestInterceptor(),
        )
    client = RuleSetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "post_get_rule_set") as post, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "pre_get_rule_set") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = ruleset_service_request.GetRuleSetRequest.pb(ruleset_service_request.GetRuleSetRequest())
        # Bypass real URL transcoding so no routing logic runs.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a 200 whose body is an (empty) serialized RuleSet.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = rule_engine.RuleSet.to_json(rule_engine.RuleSet())
        req.return_value.content = return_value

        request = ruleset_service_request.GetRuleSetRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = rule_engine.RuleSet()

        client.get_rule_set(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_update_rule_set_rest_bad_request(request_type=ruleset_service_request.UpdateRuleSetRequest):
    """An HTTP 400 from update_rule_set must surface as core_exceptions.BadRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/ruleSets/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (The previously assigned ``json_return_value`` local was unused and removed.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.update_rule_set(request)
+
+
@pytest.mark.parametrize("request_type", [
  ruleset_service_request.UpdateRuleSetRequest,
  dict,
])
def test_update_rule_set_rest_call_success(request_type):
    """update_rule_set over REST deserializes a successful JSON response."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request that satisfies URL transcoding.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/ruleSets/sample3'})

    # Fake the HTTP layer with a 200 carrying a serialized RuleSet.
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        expected = rule_engine.RuleSet(
            name='name_value',
            description='description_value',
            source='source_value',
        )
        payload = json_format.MessageToJson(rule_engine.RuleSet.pb(expected))

        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = payload.encode('UTF-8')
        http_call.return_value = fake_response

        result = client.update_rule_set(request)

    # The client must parse the payload back into a RuleSet.
    assert isinstance(result, rule_engine.RuleSet)
    assert result.name == 'name_value'
    assert result.description == 'description_value'
    assert result.source == 'source_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_rule_set_rest_interceptors(null_interceptor):
    """Verify the pre/post REST interceptor hooks fire exactly once around update_rule_set.

    Runs both with and without an interceptor instance attached to the transport.
    """
    transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RuleSetServiceRestInterceptor(),
        )
    client = RuleSetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "post_update_rule_set") as post, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "pre_update_rule_set") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = ruleset_service_request.UpdateRuleSetRequest.pb(ruleset_service_request.UpdateRuleSetRequest())
        # Bypass real URL transcoding so no routing logic runs.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a 200 whose body is an (empty) serialized RuleSet.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = rule_engine.RuleSet.to_json(rule_engine.RuleSet())
        req.return_value.content = return_value

        request = ruleset_service_request.UpdateRuleSetRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = rule_engine.RuleSet()

        client.update_rule_set(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_delete_rule_set_rest_bad_request(request_type=ruleset_service_request.DeleteRuleSetRequest):
    """An HTTP 400 from delete_rule_set must surface as core_exceptions.BadRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/ruleSets/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (The previously assigned ``json_return_value`` local was unused and removed.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.delete_rule_set(request)
+
+
@pytest.mark.parametrize("request_type", [
  ruleset_service_request.DeleteRuleSetRequest,
  dict,
])
def test_delete_rule_set_rest_call_success(request_type):
    """delete_rule_set returns None on a successful (empty-body) response."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request that satisfies URL transcoding.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/ruleSets/sample3'})

    # Fake an HTTP 200 with an empty body.
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = ''.encode('UTF-8')
        http_call.return_value = fake_response
        result = client.delete_rule_set(request)

    # An Empty proto response surfaces to the caller as None.
    assert result is None
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_rule_set_rest_interceptors(null_interceptor):
    """Verify the pre REST interceptor hook fires exactly once around delete_rule_set.

    delete has no post hook because the RPC returns Empty. Runs both with and
    without an interceptor instance attached to the transport.
    """
    transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RuleSetServiceRestInterceptor(),
        )
    client = RuleSetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "pre_delete_rule_set") as pre:
        pre.assert_not_called()
        pb_message = ruleset_service_request.DeleteRuleSetRequest.pb(ruleset_service_request.DeleteRuleSetRequest())
        # Bypass real URL transcoding so no routing logic runs.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a bare 200; no body is needed for an Empty response.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200

        request = ruleset_service_request.DeleteRuleSetRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata

        client.delete_rule_set(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
+
+
def test_list_rule_sets_rest_bad_request(request_type=ruleset_service_request.ListRuleSetsRequest):
    """An HTTP 400 from list_rule_sets must surface as core_exceptions.BadRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (The previously assigned ``json_return_value`` local was unused and removed.)
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.list_rule_sets(request)
+
+
@pytest.mark.parametrize("request_type", [
  ruleset_service_request.ListRuleSetsRequest,
  dict,
])
def test_list_rule_sets_rest_call_success(request_type):
    """list_rule_sets over REST yields a pager wrapping the JSON response."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request that satisfies URL transcoding.
    request = request_type(**{'parent': 'projects/sample1/locations/sample2'})

    # Fake the HTTP layer with a 200 carrying a serialized ListRuleSetsResponse.
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        expected = ruleset_service_request.ListRuleSetsResponse(
            next_page_token='next_page_token_value',
        )
        payload = json_format.MessageToJson(
            ruleset_service_request.ListRuleSetsResponse.pb(expected))

        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = payload.encode('UTF-8')
        http_call.return_value = fake_response

        result = client.list_rule_sets(request)

    # The client wraps the parsed response in a pager.
    assert isinstance(result, pagers.ListRuleSetsPager)
    assert result.next_page_token == 'next_page_token_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rule_sets_rest_interceptors(null_interceptor):
    """Verify the pre/post REST interceptor hooks fire exactly once around list_rule_sets.

    Runs both with and without an interceptor instance attached to the transport.
    """
    transport = transports.RuleSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.RuleSetServiceRestInterceptor(),
        )
    client = RuleSetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "post_list_rule_sets") as post, \
        mock.patch.object(transports.RuleSetServiceRestInterceptor, "pre_list_rule_sets") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = ruleset_service_request.ListRuleSetsRequest.pb(ruleset_service_request.ListRuleSetsRequest())
        # Bypass real URL transcoding so no routing logic runs.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a 200 whose body is an (empty) serialized ListRuleSetsResponse.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = ruleset_service_request.ListRuleSetsResponse.to_json(ruleset_service_request.ListRuleSetsResponse())
        req.return_value.content = return_value

        request = ruleset_service_request.ListRuleSetsRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = ruleset_service_request.ListRuleSetsResponse()

        client.list_rule_sets(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
    """An HTTP 400 from get_operation must surface as core_exceptions.BadRequest."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        # (The previously assigned ``json_return_value`` local was unused and removed.)
        response_value = Response()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_operation(request)
+
+
@pytest.mark.parametrize("request_type", [
    operations_pb2.GetOperationRequest,
    dict,
])
def test_get_operation_rest(request_type):
    """get_operation over REST returns a parsed Operation message."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request = request_type(**{'name': 'projects/sample1/locations/sample2/operations/sample3'})

    # Fake a successful HTTP response carrying an empty Operation.
    with mock.patch.object(Session, 'request') as http_call:
        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = json_format.MessageToJson(
            operations_pb2.Operation()).encode('UTF-8')
        http_call.return_value = fake_response

        result = client.get_operation(request)

    # The client must hand back an Operation instance.
    assert isinstance(result, operations_pb2.Operation)
+
def test_initialize_client_w_rest():
    """Smoke test: a client with the REST transport can be constructed."""
    assert RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    ) is not None
+
+
def test_create_rule_set_empty_call_rest():
    """Coverage failsafe: a totally empty call (request=None, no flattened
    fields) still sends an empty CreateRuleSetRequest to the transport."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub out the transport-level callable and invoke with no request.
    with mock.patch.object(type(client.transport.create_rule_set), '__call__') as stub:
        client.create_rule_set(request=None)

        # The stub must have received an empty request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == ruleset_service_request.CreateRuleSetRequest()
+
+
def test_get_rule_set_empty_call_rest():
    """Coverage failsafe: a totally empty call (request=None, no flattened
    fields) still sends an empty GetRuleSetRequest to the transport."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub out the transport-level callable and invoke with no request.
    with mock.patch.object(type(client.transport.get_rule_set), '__call__') as stub:
        client.get_rule_set(request=None)

        # The stub must have received an empty request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == ruleset_service_request.GetRuleSetRequest()
+
+
def test_update_rule_set_empty_call_rest():
    """Coverage failsafe: a totally empty call (request=None, no flattened
    fields) still sends an empty UpdateRuleSetRequest to the transport."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub out the transport-level callable and invoke with no request.
    with mock.patch.object(type(client.transport.update_rule_set), '__call__') as stub:
        client.update_rule_set(request=None)

        # The stub must have received an empty request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == ruleset_service_request.UpdateRuleSetRequest()
+
+
def test_delete_rule_set_empty_call_rest():
    """Coverage failsafe: a totally empty call (request=None, no flattened
    fields) still sends an empty DeleteRuleSetRequest to the transport."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub out the transport-level callable and invoke with no request.
    with mock.patch.object(type(client.transport.delete_rule_set), '__call__') as stub:
        client.delete_rule_set(request=None)

        # The stub must have received an empty request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == ruleset_service_request.DeleteRuleSetRequest()
+
+
def test_list_rule_sets_empty_call_rest():
    """Coverage failsafe: a totally empty call (request=None, no flattened
    fields) still sends an empty ListRuleSetsRequest to the transport."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub out the transport-level callable and invoke with no request.
    with mock.patch.object(type(client.transport.list_rule_sets), '__call__') as stub:
        client.list_rule_sets(request=None)

        # The stub must have received an empty request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == ruleset_service_request.ListRuleSetsRequest()
+
+
def test_transport_grpc_default():
    """Clients constructed without an explicit transport should use gRPC."""
    client = RuleSetServiceClient(credentials=ga_credentials.AnonymousCredentials())
    assert isinstance(client.transport, transports.RuleSetServiceGrpcTransport)
+
def test_rule_set_service_base_transport_error():
    """Passing both a credentials object and credentials_file must raise.

    The unused ``transport =`` binding was removed (F841); only the
    constructor call inside the raises-context matters.
    """
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.RuleSetServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )
+
+
def test_rule_set_service_base_transport():
    """Every abstract method and property on the base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch('google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.RuleSetServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'create_rule_set',
        'get_rule_set',
        'update_rule_set',
        'delete_rule_set',
        'list_rule_sets',
        'get_operation',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    with pytest.raises(NotImplementedError):
        transport.close()

    # Catch all for all remaining methods and properties
    remainder = [
        'kind',
    ]
    for r in remainder:
        with pytest.raises(NotImplementedError):
            # 'kind' is a property: the NotImplementedError is raised on
            # attribute access, before the trailing call is ever made.
            getattr(transport, r)()
+
+
def test_rule_set_service_base_transport_with_credentials_file():
    """A credentials file is loaded with the service's default scopes and quota project."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.RuleSetServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
),
            quota_project_id="octopus",
        )
+
+
def test_rule_set_service_base_transport_with_adc():
    """ADC is consulted when neither credentials nor a credentials file is given."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, \
            mock.patch('google.cloud.contentwarehouse_v1.services.rule_set_service.transports.RuleSetServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.RuleSetServiceTransport()
        adc.assert_called_once()
+
+
def test_rule_set_service_auth_adc():
    """With no explicit credentials, the client falls back to ADC with default scopes."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        RuleSetServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id=None,
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.RuleSetServiceGrpcTransport,
        transports.RuleSetServiceGrpcAsyncIOTransport,
    ],
)
def test_rule_set_service_transport_auth_adc(transport_class):
    """Transports built without explicit credentials obtain them via ADC,
    forwarding the caller's scopes and quota project."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.RuleSetServiceGrpcTransport,
        transports.RuleSetServiceGrpcAsyncIOTransport,
        transports.RuleSetServiceRestTransport,
    ],
)
def test_rule_set_service_transport_auth_gdch_credentials(transport_class):
    """GDC-Hosted credentials receive the api_audience, defaulting to the host."""
    host = 'https://language.com'
    api_audience_tests = [None, 'https://language2.com']
    api_audience_expect = [host, 'https://language2.com']
    for t, e in zip(api_audience_tests, api_audience_expect):
        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
            # Simulate ADC returning GDCH-style credentials that support
            # with_gdch_audience().
            gdch_mock = mock.MagicMock()
            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
            adc.return_value = (gdch_mock, None)
            transport_class(host=host, api_audience=t)
            gdch_mock.with_gdch_audience.assert_called_once_with(
                e
            )
+
+
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.RuleSetServiceGrpcTransport, grpc_helpers),
        (transports.RuleSetServiceGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_rule_set_service_transport_create_channel(transport_class, grpc_helpers):
    """The gRPC channel is created with the expected host, scopes, and options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        # Caller-supplied scopes/quota plus the service defaults must all be
        # forwarded to create_channel, with unlimited message sizes.
        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=["1", "2"],
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("transport_class", [transports.RuleSetServiceGrpcTransport, transports.RuleSetServiceGrpcAsyncIOTransport])
def test_rule_set_service_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """mTLS setup: explicit ssl_channel_credentials wins; otherwise the
    client_cert_source callback is used to build SSL credentials."""
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            # The cert/key pair produced by the callback must be fed to
            # grpc.ssl_channel_credentials.
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )
+
def test_rule_set_service_http_transport_client_cert_source_for_mtls():
    """The REST transport configures mTLS via the provided cert-source callback."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
        transports.RuleSetServiceRestTransport(
            credentials=creds,
            client_cert_source_for_mtls=client_cert_source_callback,
        )
        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_rule_set_service_host_no_port(transport_name):
    """An endpoint without a port resolves to :443 (gRPC) or an https URL (REST)."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com'),
        transport=transport_name,
    )
    if transport_name in ['grpc', 'grpc_asyncio']:
        expected_host = 'contentwarehouse.googleapis.com:443'
    else:
        expected_host = 'https://contentwarehouse.googleapis.com'
    assert client.transport._host == expected_host
+
@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_rule_set_service_host_with_port(transport_name):
    """An endpoint with an explicit port keeps that port on every transport."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com:8000'),
        transport=transport_name,
    )
    if transport_name in ['grpc', 'grpc_asyncio']:
        expected_host = 'contentwarehouse.googleapis.com:8000'
    else:
        expected_host = 'https://contentwarehouse.googleapis.com:8000'
    assert client.transport._host == expected_host
+
@pytest.mark.parametrize("transport_name", [
    "rest",
])
def test_rule_set_service_client_transport_session_collision(transport_name):
    """Two distinct clients must never share an HTTP session for any RPC."""
    client1 = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    client2 = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )
    # Each wrapped RPC exposes its session; compare them pairwise.
    for rpc_name in (
        "create_rule_set",
        "get_rule_set",
        "update_rule_set",
        "delete_rule_set",
        "list_rule_sets",
    ):
        session1 = getattr(client1.transport, rpc_name)._session
        session2 = getattr(client2.transport, rpc_name)._session
        assert session1 != session2
def test_rule_set_service_grpc_transport_channel():
    """A pre-built gRPC channel is adopted as-is and no SSL credentials are set."""
    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.RuleSetServiceGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # PEP 8: compare to the None singleton with ``is``, not ``==``.
    assert transport._ssl_channel_credentials is None
+
+
def test_rule_set_service_grpc_asyncio_transport_channel():
    """A caller-supplied aio channel must be used verbatim by the async transport."""
    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.RuleSetServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fix: compare against None with `is`, not `==` (PEP 8 / flake8 E711).
    assert transport._ssl_channel_credentials is None
+
+
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.RuleSetServiceGrpcTransport, transports.RuleSetServiceGrpcAsyncIOTransport])
def test_rule_set_service_transport_channel_mtls_with_client_cert_source(
    transport_class
):
    """Deprecated mTLS args: a client cert source becomes SSL channel credentials.

    Passing ``api_mtls_endpoint`` plus ``client_cert_source`` should emit a
    DeprecationWarning, feed the callback's cert/key pair into
    ``grpc.ssl_channel_credentials``, and open the channel against the mTLS
    endpoint (with :443 appended) using those credentials.
    """
    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = ga_credentials.AnonymousCredentials()
            # The deprecated keyword arguments must warn at construction time.
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, 'default') as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    # ADC is consulted because no credentials were passed in.
                    adc.assert_called_once()

            # The (cert, key) pair returned by client_cert_source_callback
            # must be what grpc.ssl_channel_credentials receives.
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.RuleSetServiceGrpcTransport, transports.RuleSetServiceGrpcAsyncIOTransport])
def test_rule_set_service_transport_channel_mtls_with_adc(
    transport_class
):
    """Deprecated mTLS args with no cert source: fall back to ADC SslCredentials.

    With ``api_mtls_endpoint`` set but ``client_cert_source=None``, the
    transport should (after a DeprecationWarning) pull SSL credentials from
    ``google.auth.transport.grpc.SslCredentials`` and use them for the channel.
    """
    mock_ssl_cred = mock.Mock()
    # Stub SslCredentials so its ssl_credentials property yields our mock.
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            # The deprecated keyword arguments must warn at construction time.
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            # The channel targets the mTLS endpoint with the ADC-derived
            # SSL credentials and the explicitly provided call credentials.
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
+
+
def test_document_path():
    """document_path assembles the documents resource name from its parts."""
    actual = RuleSetServiceClient.document_path("squid", "clam", "whelk")
    assert actual == "projects/{project}/locations/{location}/documents/{document}".format(
        project="squid", location="clam", document="whelk",
    )
+
+
def test_parse_document_path():
    """parse_document_path must invert document_path (round-trip)."""
    segments = {
        "project": "octopus",
        "location": "oyster",
        "document": "nudibranch",
    }
    path = RuleSetServiceClient.document_path(**segments)
    assert RuleSetServiceClient.parse_document_path(path) == segments
+
def test_location_path():
    """location_path assembles the locations resource name from its parts."""
    actual = RuleSetServiceClient.location_path("cuttlefish", "mussel")
    assert actual == "projects/{project}/locations/{location}".format(
        project="cuttlefish", location="mussel",
    )
+
+
def test_parse_location_path():
    """parse_location_path must invert location_path (round-trip)."""
    segments = {
        "project": "winkle",
        "location": "nautilus",
    }
    path = RuleSetServiceClient.location_path(**segments)
    assert RuleSetServiceClient.parse_location_path(path) == segments
+
def test_rule_set_path():
    """rule_set_path assembles the ruleSets resource name from its parts."""
    actual = RuleSetServiceClient.rule_set_path("scallop", "abalone", "squid")
    assert actual == "projects/{project}/locations/{location}/ruleSets/{rule_set}".format(
        project="scallop", location="abalone", rule_set="squid",
    )
+
+
def test_parse_rule_set_path():
    """parse_rule_set_path must invert rule_set_path (round-trip)."""
    segments = {
        "project": "clam",
        "location": "whelk",
        "rule_set": "octopus",
    }
    path = RuleSetServiceClient.rule_set_path(**segments)
    assert RuleSetServiceClient.parse_rule_set_path(path) == segments
+
def test_common_billing_account_path():
    """common_billing_account_path assembles the billingAccounts resource name."""
    actual = RuleSetServiceClient.common_billing_account_path("oyster")
    assert actual == "billingAccounts/{billing_account}".format(billing_account="oyster", )
+
+
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path must invert common_billing_account_path."""
    segments = {
        "billing_account": "nudibranch",
    }
    path = RuleSetServiceClient.common_billing_account_path(**segments)
    assert RuleSetServiceClient.parse_common_billing_account_path(path) == segments
+
def test_common_folder_path():
    """common_folder_path assembles the folders resource name."""
    actual = RuleSetServiceClient.common_folder_path("cuttlefish")
    assert actual == "folders/{folder}".format(folder="cuttlefish", )
+
+
def test_parse_common_folder_path():
    """parse_common_folder_path must invert common_folder_path."""
    segments = {
        "folder": "mussel",
    }
    path = RuleSetServiceClient.common_folder_path(**segments)
    assert RuleSetServiceClient.parse_common_folder_path(path) == segments
+
def test_common_organization_path():
    """common_organization_path assembles the organizations resource name."""
    actual = RuleSetServiceClient.common_organization_path("winkle")
    assert actual == "organizations/{organization}".format(organization="winkle", )
+
+
def test_parse_common_organization_path():
    """parse_common_organization_path must invert common_organization_path."""
    segments = {
        "organization": "nautilus",
    }
    path = RuleSetServiceClient.common_organization_path(**segments)
    assert RuleSetServiceClient.parse_common_organization_path(path) == segments
+
def test_common_project_path():
    """common_project_path assembles the projects resource name."""
    actual = RuleSetServiceClient.common_project_path("scallop")
    assert actual == "projects/{project}".format(project="scallop", )
+
+
def test_parse_common_project_path():
    """parse_common_project_path must invert common_project_path."""
    segments = {
        "project": "abalone",
    }
    path = RuleSetServiceClient.common_project_path(**segments)
    assert RuleSetServiceClient.parse_common_project_path(path) == segments
+
def test_common_location_path():
    """common_location_path assembles the locations resource name."""
    actual = RuleSetServiceClient.common_location_path("squid", "clam")
    assert actual == "projects/{project}/locations/{location}".format(
        project="squid", location="clam",
    )
+
+
def test_parse_common_location_path():
    """parse_common_location_path must invert common_location_path."""
    segments = {
        "project": "whelk",
        "location": "octopus",
    }
    path = RuleSetServiceClient.common_location_path(**segments)
    assert RuleSetServiceClient.parse_common_location_path(path) == segments
+
+
def test_client_with_default_client_info():
    """A custom ClientInfo must reach _prep_wrapped_messages in both paths."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Constructing a client forwards the info to the transport.
    with mock.patch.object(transports.RuleSetServiceTransport, '_prep_wrapped_messages') as prep:
        RuleSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Constructing the transport directly behaves the same way.
    with mock.patch.object(transports.RuleSetServiceTransport, '_prep_wrapped_messages') as prep:
        transport_cls = RuleSetServiceClient.get_transport_class()
        transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
+
+
def test_get_operation(transport: str = "grpc"):
    """get_operation forwards the request to the gRPC stub and returns its Operation."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # proto3 fields are all optional, so an empty request is enough here.
    request = operations_pb2.GetOperationRequest()

    with mock.patch.object(type(client.transport.get_operation), "__call__") as stub:
        stub.return_value = operations_pb2.Operation()
        response = client.get_operation(request)
        # Exactly one RPC went through the stub, carrying our request.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The stubbed Operation comes back unchanged in type.
    assert isinstance(response, operations_pb2.Operation)
@pytest.mark.asyncio
async def test_get_operation_async(transport: str = "grpc_asyncio"):
    """Async get_operation forwards the request and unwraps the Operation."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(), transport=transport,
    )

    # proto3 fields are all optional, so an empty request is enough here.
    request = operations_pb2.GetOperationRequest()

    with mock.patch.object(type(client.transport.get_operation), "__call__") as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        response = await client.get_operation(request)
        # Exactly one RPC went through the stub, carrying our request.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The stubbed Operation comes back unchanged in type.
    assert isinstance(response, operations_pb2.Operation)
+
def test_get_operation_field_headers():
    """URI-bound request fields must be sent as x-goog-request-params headers."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate a field that is part of the HTTP/1.1 URI.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    with mock.patch.object(type(client.transport.get_operation), "__call__") as stub:
        stub.return_value = operations_pb2.Operation()

        client.get_operation(request)
        # Exactly one RPC went through the stub, carrying our request.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The routing header mirrors the request's name field.
    _, _, call_kwargs = stub.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
    """Async variant: URI-bound fields must become x-goog-request-params headers."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Populate a field that is part of the HTTP/1.1 URI.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    with mock.patch.object(type(client.transport.get_operation), "__call__") as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        await client.get_operation(request)
        # Exactly one RPC went through the stub, carrying our request.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The routing header mirrors the request's name field.
    _, _, call_kwargs = stub.mock_calls[0]
    assert ("x-goog-request-params", "name=locations",) in call_kwargs["metadata"]
+
def test_get_operation_from_dict():
    """A plain dict request must be accepted in place of a proto message."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.get_operation), "__call__") as stub:
        stub.return_value = operations_pb2.Operation()

        client.get_operation(
            request={
                "name": "locations",
            }
        )
        # The dict was coerced and the stub invoked.
        stub.assert_called()
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
    """Async variant: a plain dict request must be accepted."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )
    with mock.patch.object(type(client.transport.get_operation), "__call__") as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation()
        )
        await client.get_operation(
            request={
                "name": "locations",
            }
        )
        # The dict was coerced and the stub invoked.
        stub.assert_called()
+
+
def test_transport_close_grpc():
    """Leaving the client context must close the underlying gRPC channel."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc"
    )
    channel_cls = type(client.transport._grpc_channel)
    with mock.patch.object(channel_cls, "close") as close:
        with client:
            # Not closed while still inside the context.
            close.assert_not_called()
        close.assert_called_once()
+
+
@pytest.mark.asyncio
async def test_transport_close_grpc_asyncio():
    """Leaving the async client context must close the underlying gRPC channel."""
    client = RuleSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio"
    )
    channel_cls = type(client.transport._grpc_channel)
    with mock.patch.object(channel_cls, "close") as close:
        async with client:
            # Not closed while still inside the context.
            close.assert_not_called()
        close.assert_called_once()
+
+
def test_transport_close_rest():
    """Leaving the client context must close the REST session."""
    client = RuleSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    session_cls = type(client.transport._session)
    with mock.patch.object(session_cls, "close") as close:
        with client:
            # Not closed while still inside the context.
            close.assert_not_called()
        close.assert_called_once()
+
+
def test_client_ctx():
    """Entering/exiting the client context manager must close its transport."""
    # NOTE: iterate inline rather than binding a local named `transports`,
    # which would shadow the module-level transports import.
    for transport_name in ('rest', 'grpc'):
        client = RuleSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name
        )
        # The client delegates close() to its transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
+
@pytest.mark.parametrize("client_class,transport_class", [
    (RuleSetServiceClient, transports.RuleSetServiceGrpcTransport),
    (RuleSetServiceAsyncClient, transports.RuleSetServiceGrpcAsyncIOTransport),
])
def test_api_key_credentials(client_class, transport_class):
    """An ``api_key`` client option must be exchanged for API-key credentials.

    When ``client_options.api_key`` is set, the client asks google.auth for
    API-key credentials and passes exactly those to the transport constructor,
    along with the default host/scope/mTLS settings.
    """
    with mock.patch.object(
        google.auth._default, "get_api_key_credentials", create=True
    ) as get_api_key_credentials:
        mock_cred = mock.Mock()
        get_api_key_credentials.return_value = mock_cred
        options = client_options.ClientOptions()
        options.api_key = "api_key"
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            # The transport receives the API-key credentials plus defaults.
            patched.assert_called_once_with(
                credentials=mock_cred,
                credentials_file=None,
                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )
diff --git a/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py
new file mode 100644
index 000000000000..4f5a11fd40f0
--- /dev/null
+++ b/owl-bot-staging/google-cloud-contentwarehouse/v1/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py
@@ -0,0 +1,5353 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+from google.protobuf import json_format
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.contentwarehouse_v1.services.synonym_set_service import SynonymSetServiceAsyncClient
+from google.cloud.contentwarehouse_v1.services.synonym_set_service import SynonymSetServiceClient
+from google.cloud.contentwarehouse_v1.services.synonym_set_service import pagers
+from google.cloud.contentwarehouse_v1.services.synonym_set_service import transports
+from google.cloud.contentwarehouse_v1.types import synonymset
+from google.cloud.contentwarehouse_v1.types import synonymset_service_request
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+import google.auth
+
+
async def mock_async_gen(data, chunk_size=1):
    """Yield ``data`` as UTF-8 encoded chunks of ``chunk_size`` characters.

    Fix: step the index by ``chunk_size`` so chunks do not overlap when
    chunk_size > 1 (the original stepped by 1, yielding sliding windows).
    Behavior is unchanged for the default chunk_size=1.
    """
    for i in range(0, len(data), chunk_size):  # pragma: NO COVER
        chunk = data[i : i + chunk_size]
        yield chunk.encode("utf-8")
+
def client_cert_source_callback():
    """Return a dummy (certificate, private key) pair for mTLS tests."""
    return (b"cert bytes", b"key bytes")
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
def async_anonymous_credentials():
    """Return anonymous credentials suitable for async clients.

    Falls back to the synchronous anonymous credentials when
    ``google.auth.aio`` is unavailable (older google-auth releases).
    """
    if not HAS_GOOGLE_AUTH_AIO:
        return ga_credentials.AnonymousCredentials()
    return ga_credentials_async.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return an alternative endpoint when the client's default is localhost.

    This lets the mTLS-endpoint derivation tests produce a distinct endpoint
    even when the default endpoint was overridden to localhost.
    """
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint_template(client):
    """Return a test endpoint template when the default template is localhost.

    Mirrors modify_default_endpoint, but for the universe-domain template.
    """
    if "localhost" in client._DEFAULT_ENDPOINT_TEMPLATE:
        return "test.{UNIVERSE_DOMAIN}"
    return client._DEFAULT_ENDPOINT_TEMPLATE
+
+
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint inserts `.mtls` only for googleapis hosts."""
    cases = [
        (None, None),
        ("example.googleapis.com", "example.mtls.googleapis.com"),
        # Already-mtls endpoints are left untouched.
        ("example.mtls.googleapis.com", "example.mtls.googleapis.com"),
        ("example.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("example.mtls.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        # Non-Google hosts pass through unchanged.
        ("api.example.com", "api.example.com"),
    ]
    for endpoint, expected in cases:
        assert SynonymSetServiceClient._get_default_mtls_endpoint(endpoint) == expected
+
def test__read_environment_variables():
    """_read_environment_variables returns (use_client_cert, mtls_endpoint, universe)."""
    # Defaults with nothing set in the environment.
    assert SynonymSetServiceClient._read_environment_variables() == (False, "auto", None)

    # GOOGLE_API_USE_CLIENT_CERTIFICATE toggles the first element.
    for value, expected in (("true", True), ("false", False)):
        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": value}):
            assert SynonymSetServiceClient._read_environment_variables() == (expected, "auto", None)

    # Any other value for the cert flag is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            SynonymSetServiceClient._read_environment_variables()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    # GOOGLE_API_USE_MTLS_ENDPOINT passes through when it is a known value.
    for value in ("never", "always", "auto"):
        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": value}):
            assert SynonymSetServiceClient._read_environment_variables() == (False, value, None)

    # Any other value for the mTLS policy is rejected.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            SynonymSetServiceClient._read_environment_variables()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # GOOGLE_CLOUD_UNIVERSE_DOMAIN flows through as the third element.
    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
        assert SynonymSetServiceClient._read_environment_variables() == (False, "auto", "foo.com")
+
def test__get_client_cert_source():
    """A cert source is returned only when client certificates are enabled."""
    provided_source = mock.Mock()
    default_source = mock.Mock()

    # Disabled: nothing is returned, even if a source was provided.
    assert SynonymSetServiceClient._get_client_cert_source(None, False) is None
    assert SynonymSetServiceClient._get_client_cert_source(provided_source, False) is None
    # Enabled: the explicitly provided source wins.
    assert SynonymSetServiceClient._get_client_cert_source(provided_source, True) == provided_source

    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=default_source):
            # Enabled with no explicit source: fall back to the ADC default.
            assert SynonymSetServiceClient._get_client_cert_source(None, True) is default_source
            assert SynonymSetServiceClient._get_client_cert_source(provided_source, "true") is provided_source
+
@mock.patch.object(SynonymSetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SynonymSetServiceClient))
@mock.patch.object(SynonymSetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SynonymSetServiceAsyncClient))
def test__get_api_endpoint():
    """_get_api_endpoint resolution: override > mTLS policy > universe template.

    An explicit api override always wins; otherwise the mTLS policy
    ("always"/"auto"/"never") and the presence of a client cert source decide
    between the mTLS endpoint and the universe-domain template. mTLS is only
    supported in the default (googleapis.com) universe.
    """
    api_override = "foo.com"
    mock_client_cert_source = mock.Mock()
    default_universe = SynonymSetServiceClient._DEFAULT_UNIVERSE
    default_endpoint = SynonymSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = SynonymSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    # Explicit override takes precedence over everything else.
    assert SynonymSetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
    # "auto" with a cert source, or "always" regardless, selects the mTLS endpoint.
    assert SynonymSetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == SynonymSetServiceClient.DEFAULT_MTLS_ENDPOINT
    assert SynonymSetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
    assert SynonymSetServiceClient._get_api_endpoint(None, None, default_universe, "always") == SynonymSetServiceClient.DEFAULT_MTLS_ENDPOINT
    assert SynonymSetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == SynonymSetServiceClient.DEFAULT_MTLS_ENDPOINT
    # "never" falls back to the universe-domain template.
    assert SynonymSetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
    assert SynonymSetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint

    # mTLS + non-default universe is an unsupported combination.
    with pytest.raises(MutualTLSChannelError) as excinfo:
        SynonymSetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+
+
def test__get_universe_domain():
    """Client setting beats the env var; default applies otherwise; empty is invalid."""
    assert SynonymSetServiceClient._get_universe_domain("foo.com", "bar.com") == "foo.com"
    assert SynonymSetServiceClient._get_universe_domain(None, "bar.com") == "bar.com"
    assert SynonymSetServiceClient._get_universe_domain(None, None) == SynonymSetServiceClient._DEFAULT_UNIVERSE

    # An empty universe domain is rejected outright.
    with pytest.raises(ValueError) as excinfo:
        SynonymSetServiceClient._get_universe_domain("", None)
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport, "grpc"),
    (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest"),
])
def test__validate_universe_domain(client_class, transport_class, transport_name):
    """_validate_universe_domain accepts matching/absent credentials, rejects mismatches.

    Validation succeeds when the client's universe matches the credentials'
    universe or when credentials are missing; it raises ValueError on any
    mismatch. The mismatch branches are gated on the installed google-auth
    and google-api-core versions, which first exposed universe-domain support.
    """
    client = client_class(
        transport=transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
    )
    assert client._validate_universe_domain() == True

    # Test the case when universe is already validated.
    assert client._validate_universe_domain() == True

    if transport_name == "grpc":
        # Test the case where credentials are provided by the
        # `local_channel_credentials`. The default universes in both match.
        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
        client = client_class(transport=transport_class(channel=channel))
        assert client._validate_universe_domain() == True

        # Test the case where credentials do not exist: e.g. a transport is provided
        # with no credentials. Validation should still succeed because there is no
        # mismatch with non-existent credentials.
        channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
        transport=transport_class(channel=channel)
        transport._credentials = None
        client = client_class(transport=transport)
        assert client._validate_universe_domain() == True

    # TODO: This is needed to cater for older versions of google-auth
    # Make this test unconditional once the minimum supported version of
    # google-auth becomes 2.23.0 or higher.
    google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]]
    if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23):
        credentials = ga_credentials.AnonymousCredentials()
        credentials._universe_domain = "foo.com"
        # Test the case when there is a universe mismatch from the credentials.
        client = client_class(
            transport=transport_class(credentials=credentials)
        )
        with pytest.raises(ValueError) as excinfo:
            client._validate_universe_domain()
        assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."

        # Test the case when there is a universe mismatch from the client.
        #
        # TODO: Make this test unconditional once the minimum supported version of
        # google-api-core becomes 2.15.0 or higher.
        api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]]
        if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15):
            client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),))
            with pytest.raises(ValueError) as excinfo:
                client._validate_universe_domain()
            assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default."

    # Test that ValueError is raised if universe_domain is provided via client options and credentials is None
    with pytest.raises(ValueError):
        client._compare_universes("foo.bar", None)
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (SynonymSetServiceClient, "grpc"),
+    (SynonymSetServiceAsyncClient, "grpc_asyncio"),
+    (SynonymSetServiceClient, "rest"),
+])
+def test_synonym_set_service_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'contentwarehouse.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://contentwarehouse.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.SynonymSetServiceGrpcTransport, "grpc"),
+    (transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (transports.SynonymSetServiceRestTransport, "rest"),
+])
+def test_synonym_set_service_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (SynonymSetServiceClient, "grpc"),
+    (SynonymSetServiceAsyncClient, "grpc_asyncio"),
+    (SynonymSetServiceClient, "rest"),
+])
+def test_synonym_set_service_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'contentwarehouse.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://contentwarehouse.googleapis.com'
+        )
+
+
+def test_synonym_set_service_client_get_transport_class():
+    transport = SynonymSetServiceClient.get_transport_class()
+    available_transports = [
+        transports.SynonymSetServiceGrpcTransport,
+        transports.SynonymSetServiceRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = SynonymSetServiceClient.get_transport_class("grpc")
+    assert transport == transports.SynonymSetServiceGrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport, "grpc"),
+    (SynonymSetServiceAsyncClient, transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest"),
+])
+@mock.patch.object(SynonymSetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SynonymSetServiceClient))
+@mock.patch.object(SynonymSetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SynonymSetServiceAsyncClient))
+def test_synonym_set_service_client_client_options(client_class, transport_class, transport_name):
+    """Verify how ClientOptions and mTLS env vars drive transport construction.
+
+    Covers: passing a ready transport instance vs. a transport name, an
+    api_endpoint override, GOOGLE_API_USE_MTLS_ENDPOINT set to "never",
+    "always", and an unsupported value, an unsupported
+    GOOGLE_API_USE_CLIENT_CERTIFICATE value, quota_project_id, and
+    api_audience forwarding.
+    """
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(SynonymSetServiceClient, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(SynonymSetServiceClient, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_endpoint is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport, "grpc", "true"),
+    (SynonymSetServiceAsyncClient, transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport, "grpc", "false"),
+    (SynonymSetServiceAsyncClient, transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest", "true"),
+    (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest", "false"),
+])
+@mock.patch.object(SynonymSetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SynonymSetServiceClient))
+@mock.patch.object(SynonymSetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SynonymSetServiceAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_synonym_set_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    """With GOOGLE_API_USE_MTLS_ENDPOINT="auto", the endpoint switches to the
+    mTLS endpoint only when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a
+    client certificate (explicit or ADC-provided) is available.
+    """
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
+                    if use_client_cert_env == "false":
+                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
+                patched.return_value = None
+                client = client_class(transport=transport_name)
+                patched.assert_called_once_with(
+                    credentials=None,
+                    credentials_file=None,
+                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                    scopes=None,
+                    client_cert_source_for_mtls=None,
+                    quota_project_id=None,
+                    client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    always_use_jwt_access=True,
+                    api_audience=None,
+                )
+
+
+@pytest.mark.parametrize("client_class", [
+    SynonymSetServiceClient, SynonymSetServiceAsyncClient
+])
+@mock.patch.object(SynonymSetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SynonymSetServiceClient))
+@mock.patch.object(SynonymSetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SynonymSetServiceAsyncClient))
+def test_synonym_set_service_client_get_mtls_endpoint_and_cert_source(client_class):
+    """get_mtls_endpoint_and_cert_source() honors both env vars.
+
+    GOOGLE_API_USE_CLIENT_CERTIFICATE gates the cert source;
+    GOOGLE_API_USE_MTLS_ENDPOINT (never/always/auto) selects the endpoint.
+    Unsupported values raise MutualTLSChannelError / ValueError respectively.
+    """
+    mock_client_cert_source = mock.Mock()
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source == mock_client_cert_source
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        mock_client_cert_source = mock.Mock()
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+            assert api_endpoint == client_class.DEFAULT_ENDPOINT
+            assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
+                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+                assert cert_source == mock_client_cert_source
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client_class.get_mtls_endpoint_and_cert_source()
+
+        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+@pytest.mark.parametrize("client_class", [
+    SynonymSetServiceClient, SynonymSetServiceAsyncClient
+])
+@mock.patch.object(SynonymSetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SynonymSetServiceClient))
+@mock.patch.object(SynonymSetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SynonymSetServiceAsyncClient))
+def test_synonym_set_service_client_client_api_endpoint(client_class):
+    """client.api_endpoint resolution: explicit override > mTLS env vars >
+    universe-domain template. hasattr/delattr probing keeps the test working
+    against google-api-core versions whose ClientOptions lacks universe_domain.
+    """
+    mock_client_cert_source = client_cert_source_callback
+    api_override = "foo.com"
+    default_universe = SynonymSetServiceClient._DEFAULT_UNIVERSE
+    default_endpoint = SynonymSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
+    mock_universe = "bar.com"
+    mock_endpoint = SynonymSetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
+
+    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
+    # use ClientOptions.api_endpoint as the api endpoint regardless.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
+            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
+            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+            assert client.api_endpoint == api_override
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
+    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        client = client_class(credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+    # and ClientOptions.universe_domain="bar.com",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+    options = client_options.ClientOptions()
+    universe_exists = hasattr(options, "universe_domain")
+    if universe_exists:
+        options = client_options.ClientOptions(universe_domain=mock_universe)
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    else:
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
+    assert client.universe_domain == (mock_universe if universe_exists else default_universe)
+
+    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    options = client_options.ClientOptions()
+    if hasattr(options, "universe_domain"):
+        delattr(options, "universe_domain")
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
+        assert client.api_endpoint == default_endpoint
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport, "grpc"),
+    (SynonymSetServiceAsyncClient, transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest"),
+])
+def test_synonym_set_service_client_client_options_scopes(client_class, transport_class, transport_name):
+    # Check the case scopes are provided.
+    options = client_options.ClientOptions(
+        scopes=["1", "2"],
+    )
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=["1", "2"],
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport, "grpc", grpc_helpers),
+    (SynonymSetServiceAsyncClient, transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+    (SynonymSetServiceClient, transports.SynonymSetServiceRestTransport, "rest", None),
+])
+def test_synonym_set_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    """A credentials_file set on ClientOptions is forwarded to the transport
+    unchanged, with credentials left as None so the transport loads the file.
+    """
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+def test_synonym_set_service_client_client_options_from_dict():
+    with mock.patch('google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.SynonymSetServiceGrpcTransport.__init__') as grpc_transport:
+        grpc_transport.return_value = None
+        client = SynonymSetServiceClient(
+            client_options={'api_endpoint': 'squid.clam.whelk'}
+        )
+        grpc_transport.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport, "grpc", grpc_helpers),
+    (SynonymSetServiceAsyncClient, transports.SynonymSetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+])
+def test_synonym_set_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    """Credentials loaded from a file are the ones passed to create_channel,
+    not the ADC credentials, and the expected channel options are applied.
+    """
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "contentwarehouse.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+),
+            scopes=None,
+            default_host="contentwarehouse.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  synonymset_service_request.CreateSynonymSetRequest,
+  dict,
+])
+def test_create_synonym_set(request_type, transport: str = 'grpc'):
+    """create_synonym_set() sends the request to the stub and unpacks the
+    SynonymSet response fields.
+    """
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_synonym_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = synonymset.SynonymSet(
+            name='name_value',
+            context='context_value',
+        )
+        response = client.create_synonym_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        # The client normalizes dict input to the proto request type.
+        request = synonymset_service_request.CreateSynonymSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, synonymset.SynonymSet)
+    assert response.name == 'name_value'
+    assert response.context == 'context_value'
+
+
+def test_create_synonym_set_non_empty_request_with_auto_populated_field():
+    """Non-UUID4 string fields set by the caller survive the request round-trip
+    (AIP-4235 auto-population must not clobber them).
+    """
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = synonymset_service_request.CreateSynonymSetRequest(
+        parent='parent_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_synonym_set),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.create_synonym_set(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == synonymset_service_request.CreateSynonymSetRequest(
+            parent='parent_value',
+        )
+
def test_create_synonym_set_use_cached_wrapped_rpc():
    """Clients should reuse the wrapped RPC cached by _prep_wrapped_messages
    instead of re-wrapping the method on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_mock:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Client construction wraps every method once.
        assert wrap_mock.call_count > 0
        wrap_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        assert client._transport.create_synonym_set in client._transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations can be counted.
        mock_rpc = mock.Mock()
        # operation_request.operation in compute client(s) expects a string.
        mock_rpc.return_value.name = "foo"
        client._transport._wrapped_methods[client._transport.create_synonym_set] = mock_rpc

        request = {}
        client.create_synonym_set(request)
        assert mock_rpc.call_count == 1

        # A second call hits the cache: no new wrapper, one more invocation.
        client.create_synonym_set(request)
        assert wrap_mock.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_create_synonym_set_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async clients should reuse the wrapped RPC cached by
    _prep_wrapped_messages instead of re-wrapping on every call."""
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrap_mock:
        client = SynonymSetServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Client construction wraps every method once.
        assert wrap_mock.call_count > 0
        wrap_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        assert client._client._transport.create_synonym_set in client._client._transport._wrapped_methods

        # Swap the cached wrapper for an async mock so invocations can be counted.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.create_synonym_set] = mock_rpc

        request = {}
        await client.create_synonym_set(request)
        assert mock_rpc.call_count == 1

        # A second call hits the cache: no new wrapper, one more invocation.
        await client.create_synonym_set(request)
        assert wrap_mock.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_create_synonym_set_async(transport: str = 'grpc_asyncio', request_type=synonymset_service_request.CreateSynonymSetRequest):
    """Verify the async create_synonym_set forwards the request and returns a SynonymSet."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # (PEP 8: space after '=' restored.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet(
            name='name_value',
            context='context_value',
        ))
        response = await client.create_synonym_set(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once (strengthened from a truthy-only length check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = synonymset_service_request.CreateSynonymSetRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, synonymset.SynonymSet)
    assert response.name == 'name_value'
    assert response.context == 'context_value'
+
+
@pytest.mark.asyncio
async def test_create_synonym_set_async_from_dict():
    # Re-run the async test with a plain dict request to cover dict coercion.
    await test_create_synonym_set_async(request_type=dict)
+
def test_create_synonym_set_field_headers():
    """Routing fields in the request must be echoed as x-goog-request-params."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a field
    # header, so give the routing field a non-empty value.
    request = synonymset_service_request.CreateSynonymSetRequest()
    request.parent = 'parent_value'

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.create_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        client.create_synonym_set(request)

        # The stub saw exactly one call, with the request unchanged.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The routing header must be present in the call metadata.
    _, _, call_kwargs = stub.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in call_kwargs['metadata']
+
+
@pytest.mark.asyncio
async def test_create_synonym_set_field_headers_async():
    """Routing fields must be echoed as x-goog-request-params (async client)."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a field
    # header, so give the routing field a non-empty value.
    request = synonymset_service_request.CreateSynonymSetRequest()
    request.parent = 'parent_value'

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.create_synonym_set), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet())
        await client.create_synonym_set(request)

        # The stub was called, with the request unchanged.
        assert len(stub.mock_calls)
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The routing header must be present in the call metadata.
    _, _, call_kwargs = stub.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in call_kwargs['metadata']
+
+
def test_create_synonym_set_flattened():
    """The flattened signature must populate the corresponding request fields."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.create_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        # Invoke with a truthy value for each flattened field, via keywords.
        client.create_synonym_set(
            parent='parent_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )

        # The underlying call carries a request built from those fields.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0].parent == 'parent_value'
        assert call_args[0].synonym_set == synonymset.SynonymSet(name='name_value')
+
+
def test_create_synonym_set_flattened_error():
    """Passing a request object together with flattened fields is rejected."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mixing a request object with flattened fields must raise.
    with pytest.raises(ValueError):
        client.create_synonym_set(
            synonymset_service_request.CreateSynonymSetRequest(),
            parent='parent_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )
+
@pytest.mark.asyncio
async def test_create_synonym_set_flattened_async():
    """The async flattened signature must populate the request fields."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.  (A previous
        # plain-message assignment here was dead code — it was immediately
        # overwritten by the awaitable fake below — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_synonym_set(
            parent='parent_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
        arg = args[0].synonym_set
        mock_val = synonymset.SynonymSet(name='name_value')
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_create_synonym_set_flattened_error_async():
    """Passing a request object together with flattened fields is rejected
    by the async client as well."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mixing a request object with flattened fields must raise.
    with pytest.raises(ValueError):
        await client.create_synonym_set(
            synonymset_service_request.CreateSynonymSetRequest(),
            parent='parent_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )
+
+
+@pytest.mark.parametrize("request_type", [
+  synonymset_service_request.GetSynonymSetRequest,
+  dict,
+])
+def test_get_synonym_set(request_type, transport: str = 'grpc'):
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_synonym_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = synonymset.SynonymSet(
+            name='name_value',
+            context='context_value',
+        )
+        response = client.get_synonym_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = synonymset_service_request.GetSynonymSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, synonymset.SynonymSet)
+    assert response.name == 'name_value'
+    assert response.context == 'context_value'
+
+
def test_get_synonym_set_non_empty_request_with_auto_populated_field():
    """Coverage failsafe: UUID4 fields are auto-populated per AIP-4235 even
    when the request is non-empty."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Fill every non-UUID4 string field so automatic population of UUID4
    # fields (AIP 4235) can be observed against a non-empty request.
    request = synonymset_service_request.GetSynonymSetRequest(
        name='name_value',
    )

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.get_synonym_set), '__call__') as stub:
        # operation_request.operation in compute client(s) expects a string.
        stub.return_value.name = "foo"
        client.get_synonym_set(request=request)
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        expected = synonymset_service_request.GetSynonymSetRequest(
            name='name_value',
        )
        assert call_args[0] == expected
+
def test_get_synonym_set_use_cached_wrapped_rpc():
    """Clients should reuse the wrapped RPC cached by _prep_wrapped_messages
    instead of re-wrapping the method on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_mock:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Client construction wraps every method once.
        assert wrap_mock.call_count > 0
        wrap_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        assert client._transport.get_synonym_set in client._transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations can be counted.
        mock_rpc = mock.Mock()
        # operation_request.operation in compute client(s) expects a string.
        mock_rpc.return_value.name = "foo"
        client._transport._wrapped_methods[client._transport.get_synonym_set] = mock_rpc

        request = {}
        client.get_synonym_set(request)
        assert mock_rpc.call_count == 1

        # A second call hits the cache: no new wrapper, one more invocation.
        client.get_synonym_set(request)
        assert wrap_mock.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_get_synonym_set_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async clients should reuse the wrapped RPC cached by
    _prep_wrapped_messages instead of re-wrapping on every call."""
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrap_mock:
        client = SynonymSetServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Client construction wraps every method once.
        assert wrap_mock.call_count > 0
        wrap_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        assert client._client._transport.get_synonym_set in client._client._transport._wrapped_methods

        # Swap the cached wrapper for an async mock so invocations can be counted.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.get_synonym_set] = mock_rpc

        request = {}
        await client.get_synonym_set(request)
        assert mock_rpc.call_count == 1

        # A second call hits the cache: no new wrapper, one more invocation.
        await client.get_synonym_set(request)
        assert wrap_mock.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_get_synonym_set_async(transport: str = 'grpc_asyncio', request_type=synonymset_service_request.GetSynonymSetRequest):
    """Verify the async get_synonym_set forwards the request and returns a SynonymSet."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # (PEP 8: space after '=' restored.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet(
            name='name_value',
            context='context_value',
        ))
        response = await client.get_synonym_set(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once (strengthened from a truthy-only length check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = synonymset_service_request.GetSynonymSetRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, synonymset.SynonymSet)
    assert response.name == 'name_value'
    assert response.context == 'context_value'
+
+
@pytest.mark.asyncio
async def test_get_synonym_set_async_from_dict():
    # Re-run the async test with a plain dict request to cover dict coercion.
    await test_get_synonym_set_async(request_type=dict)
+
def test_get_synonym_set_field_headers():
    """Routing fields in the request must be echoed as x-goog-request-params."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a field
    # header, so give the routing field a non-empty value.
    request = synonymset_service_request.GetSynonymSetRequest()
    request.name = 'name_value'

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.get_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        client.get_synonym_set(request)

        # The stub saw exactly one call, with the request unchanged.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The routing header must be present in the call metadata.
    _, _, call_kwargs = stub.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in call_kwargs['metadata']
+
+
@pytest.mark.asyncio
async def test_get_synonym_set_field_headers_async():
    """Routing fields must be echoed as x-goog-request-params (async client)."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a field
    # header, so give the routing field a non-empty value.
    request = synonymset_service_request.GetSynonymSetRequest()
    request.name = 'name_value'

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.get_synonym_set), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet())
        await client.get_synonym_set(request)

        # The stub was called, with the request unchanged.
        assert len(stub.mock_calls)
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The routing header must be present in the call metadata.
    _, _, call_kwargs = stub.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in call_kwargs['metadata']
+
+
def test_get_synonym_set_flattened():
    """The flattened signature must populate the corresponding request fields."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.get_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        # Invoke with a truthy value for each flattened field, via keywords.
        client.get_synonym_set(
            name='name_value',
        )

        # The underlying call carries a request built from those fields.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0].name == 'name_value'
+
+
def test_get_synonym_set_flattened_error():
    """Passing a request object together with flattened fields is rejected."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mixing a request object with flattened fields must raise.
    with pytest.raises(ValueError):
        client.get_synonym_set(
            synonymset_service_request.GetSynonymSetRequest(),
            name='name_value',
        )
+
@pytest.mark.asyncio
async def test_get_synonym_set_flattened_async():
    """The async flattened signature must populate the request fields."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.  (A previous
        # plain-message assignment here was dead code — it was immediately
        # overwritten by the awaitable fake below — and has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_synonym_set(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_get_synonym_set_flattened_error_async():
    """Passing a request object together with flattened fields is rejected
    by the async client as well."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mixing a request object with flattened fields must raise.
    with pytest.raises(ValueError):
        await client.get_synonym_set(
            synonymset_service_request.GetSynonymSetRequest(),
            name='name_value',
        )
+
+
+@pytest.mark.parametrize("request_type", [
+  synonymset_service_request.UpdateSynonymSetRequest,
+  dict,
+])
+def test_update_synonym_set(request_type, transport: str = 'grpc'):
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_synonym_set),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = synonymset.SynonymSet(
+            name='name_value',
+            context='context_value',
+        )
+        response = client.update_synonym_set(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = synonymset_service_request.UpdateSynonymSetRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, synonymset.SynonymSet)
+    assert response.name == 'name_value'
+    assert response.context == 'context_value'
+
+
def test_update_synonym_set_non_empty_request_with_auto_populated_field():
    """Coverage failsafe: UUID4 fields are auto-populated per AIP-4235 even
    when the request is non-empty."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Fill every non-UUID4 string field so automatic population of UUID4
    # fields (AIP 4235) can be observed against a non-empty request.
    request = synonymset_service_request.UpdateSynonymSetRequest(
        name='name_value',
    )

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.update_synonym_set), '__call__') as stub:
        # operation_request.operation in compute client(s) expects a string.
        stub.return_value.name = "foo"
        client.update_synonym_set(request=request)
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        expected = synonymset_service_request.UpdateSynonymSetRequest(
            name='name_value',
        )
        assert call_args[0] == expected
+
def test_update_synonym_set_use_cached_wrapped_rpc():
    """Clients should reuse the wrapped RPC cached by _prep_wrapped_messages
    instead of re-wrapping the method on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrap_mock:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Client construction wraps every method once.
        assert wrap_mock.call_count > 0
        wrap_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        assert client._transport.update_synonym_set in client._transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations can be counted.
        mock_rpc = mock.Mock()
        # operation_request.operation in compute client(s) expects a string.
        mock_rpc.return_value.name = "foo"
        client._transport._wrapped_methods[client._transport.update_synonym_set] = mock_rpc

        request = {}
        client.update_synonym_set(request)
        assert mock_rpc.call_count == 1

        # A second call hits the cache: no new wrapper, one more invocation.
        client.update_synonym_set(request)
        assert wrap_mock.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_update_synonym_set_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async clients should reuse the wrapped RPC cached by
    _prep_wrapped_messages instead of re-wrapping on every call."""
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrap_mock:
        client = SynonymSetServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Client construction wraps every method once.
        assert wrap_mock.call_count > 0
        wrap_mock.reset_mock()

        # The method must already be present in the wrapped-method cache.
        assert client._client._transport.update_synonym_set in client._client._transport._wrapped_methods

        # Swap the cached wrapper for an async mock so invocations can be counted.
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.update_synonym_set] = mock_rpc

        request = {}
        await client.update_synonym_set(request)
        assert mock_rpc.call_count == 1

        # A second call hits the cache: no new wrapper, one more invocation.
        await client.update_synonym_set(request)
        assert wrap_mock.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_update_synonym_set_async(transport: str = 'grpc_asyncio', request_type=synonymset_service_request.UpdateSynonymSetRequest):
    """Verify the async update_synonym_set forwards the request and returns a SynonymSet."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # (PEP 8: space after '=' restored.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet(
            name='name_value',
            context='context_value',
        ))
        response = await client.update_synonym_set(request)

        # Establish that the underlying gRPC stub method was called exactly
        # once (strengthened from a truthy-only length check).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = synonymset_service_request.UpdateSynonymSetRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, synonymset.SynonymSet)
    assert response.name == 'name_value'
    assert response.context == 'context_value'
+
+
@pytest.mark.asyncio
async def test_update_synonym_set_async_from_dict():
    # Re-run the async test with a plain dict request to cover dict coercion.
    await test_update_synonym_set_async(request_type=dict)
+
def test_update_synonym_set_field_headers():
    """Routing fields in the request must be echoed as x-goog-request-params."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a field
    # header, so give the routing field a non-empty value.
    request = synonymset_service_request.UpdateSynonymSetRequest()
    request.name = 'name_value'

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.update_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        client.update_synonym_set(request)

        # The stub saw exactly one call, with the request unchanged.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The routing header must be present in the call metadata.
    _, _, call_kwargs = stub.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in call_kwargs['metadata']
+
+
@pytest.mark.asyncio
async def test_update_synonym_set_field_headers_async():
    """Routing fields must be echoed as x-goog-request-params (async client)."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as a field
    # header, so give the routing field a non-empty value.
    request = synonymset_service_request.UpdateSynonymSetRequest()
    request.name = 'name_value'

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.update_synonym_set), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet())
        await client.update_synonym_set(request)

        # The stub was called, with the request unchanged.
        assert len(stub.mock_calls)
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == request

    # The routing header must be present in the call metadata.
    _, _, call_kwargs = stub.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in call_kwargs['metadata']
+
+
def test_update_synonym_set_flattened():
    """The flattened signature must populate the corresponding request fields."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Fake the underlying gRPC stub invocation.
    with mock.patch.object(type(client.transport.update_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        # Invoke with a truthy value for each flattened field, via keywords.
        client.update_synonym_set(
            name='name_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )

        # The underlying call carries a request built from those fields.
        assert len(stub.mock_calls) == 1
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0].name == 'name_value'
        assert call_args[0].synonym_set == synonymset.SynonymSet(name='name_value')
+
+
def test_update_synonym_set_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    creds = ga_credentials.AnonymousCredentials()
    client = SynonymSetServiceClient(credentials=creds)

    # Supplying both an explicit request message and flattened keyword
    # arguments is ambiguous, so the client rejects the combination.
    explicit_request = synonymset_service_request.UpdateSynonymSetRequest()
    with pytest.raises(ValueError):
        client.update_synonym_set(
            explicit_request,
            name='name_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )
+
@pytest.mark.asyncio
async def test_update_synonym_set_flattened_async():
    """Verify UpdateSynonymSet (async) copies flattened keyword arguments
    into the generated request message."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.  A single
        # assignment suffices; the generated code previously set
        # call.return_value twice, the first value being dead.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.update_synonym_set(
            name='name_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
        arg = args[0].synonym_set
        mock_val = synonymset.SynonymSet(name='name_value')
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_update_synonym_set_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials())

    # An explicit request message combined with flattened keyword
    # arguments is ambiguous and must be rejected.
    explicit_request = synonymset_service_request.UpdateSynonymSetRequest()
    with pytest.raises(ValueError):
        await client.update_synonym_set(
            explicit_request,
            name='name_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )
+
+
@pytest.mark.parametrize("request_type", [
  synonymset_service_request.DeleteSynonymSetRequest,
  dict,
])
def test_delete_synonym_set(request_type, transport: str = 'grpc'):
    """Verify ``delete_synonym_set`` coerces an empty request (message or
    dict) into a DeleteSynonymSetRequest, calls the stub once, and
    returns ``None``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        response = client.delete_synonym_set(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        # The stub must have received an empty DeleteSynonymSetRequest,
        # regardless of which request_type was passed in.
        request = synonymset_service_request.DeleteSynonymSetRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None
+
+
def test_delete_synonym_set_non_empty_request_with_auto_populated_field():
    """Verify a non-empty DeleteSynonymSetRequest reaches the stub with its
    explicitly-set fields intact (AIP-4235 auto-population failsafe)."""
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = synonymset_service_request.DeleteSynonymSetRequest(
        name='name_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_synonym_set),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.delete_synonym_set(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == synonymset_service_request.DeleteSynonymSetRequest(
            name='name_value',
        )
+
def test_delete_synonym_set_use_cached_wrapped_rpc():
    """Verify ``delete_synonym_set`` reuses the wrapped RPC cached at client
    creation instead of re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.delete_synonym_set in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.delete_synonym_set] = mock_rpc
        request = {}
        client.delete_synonym_set(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.delete_synonym_set(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_delete_synonym_set_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async variant: ``delete_synonym_set`` reuses the wrapped RPC cached at
    client creation instead of re-wrapping per call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = SynonymSetServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.delete_synonym_set in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.delete_synonym_set] = mock_rpc

        request = {}
        await client.delete_synonym_set(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.delete_synonym_set(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_delete_synonym_set_async(transport: str = 'grpc_asyncio', request_type=synonymset_service_request.DeleteSynonymSetRequest):
    """Verify DeleteSynonymSet (async) coerces an empty request, calls the
    stub, and returns ``None``."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_synonym_set(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = synonymset_service_request.DeleteSynonymSetRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None
+
+
@pytest.mark.asyncio
async def test_delete_synonym_set_async_from_dict():
    """Re-run the async DeleteSynonymSet test with a dict request."""
    await test_delete_synonym_set_async(request_type=dict)
+
def test_delete_synonym_set_field_headers():
    """Verify DeleteSynonymSet sends the resource ``name`` as an
    ``x-goog-request-params`` routing header."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = synonymset_service_request.DeleteSynonymSetRequest()

    request.name = 'name_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_synonym_set),
            '__call__') as call:
        call.return_value = None
        client.delete_synonym_set(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in kw['metadata']
+
+
@pytest.mark.asyncio
async def test_delete_synonym_set_field_headers_async():
    """Async variant: DeleteSynonymSet sends the resource ``name`` as an
    ``x-goog-request-params`` routing header."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = synonymset_service_request.DeleteSynonymSetRequest()

    request.name = 'name_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_synonym_set),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_synonym_set(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name_value',
    ) in kw['metadata']
+
+
def test_delete_synonym_set_flattened():
    """Verify the flattened ``name`` keyword argument to
    ``delete_synonym_set`` is copied into the generated request."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = None
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_synonym_set(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
+
+
def test_delete_synonym_set_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    creds = ga_credentials.AnonymousCredentials()
    client = SynonymSetServiceClient(credentials=creds)

    # Passing an explicit request message together with a flattened
    # keyword argument is ambiguous; the client must refuse it.
    explicit_request = synonymset_service_request.DeleteSynonymSetRequest()
    with pytest.raises(ValueError):
        client.delete_synonym_set(
            explicit_request,
            name='name_value',
        )
+
@pytest.mark.asyncio
async def test_delete_synonym_set_flattened_async():
    """Verify DeleteSynonymSet (async) copies the flattened ``name`` keyword
    argument into the generated request."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_synonym_set),
            '__call__') as call:
        # Designate an appropriate return value for the call.  A single
        # assignment suffices; the generated code previously set
        # call.return_value twice, the first value being dead.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.delete_synonym_set(
            name='name_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = 'name_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_delete_synonym_set_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials())

    # An explicit request message combined with a flattened keyword
    # argument is ambiguous and must be rejected.
    explicit_request = synonymset_service_request.DeleteSynonymSetRequest()
    with pytest.raises(ValueError):
        await client.delete_synonym_set(
            explicit_request,
            name='name_value',
        )
+
+
@pytest.mark.parametrize("request_type", [
  synonymset_service_request.ListSynonymSetsRequest,
  dict,
])
def test_list_synonym_sets(request_type, transport: str = 'grpc'):
    """Verify ``list_synonym_sets`` coerces an empty request (message or
    dict), calls the stub once, and returns a pager exposing
    ``next_page_token``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = synonymset_service_request.ListSynonymSetsResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_synonym_sets(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = synonymset_service_request.ListSynonymSetsRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListSynonymSetsPager)
    assert response.next_page_token == 'next_page_token_value'
+
+
def test_list_synonym_sets_non_empty_request_with_auto_populated_field():
    """Verify a non-empty ListSynonymSetsRequest reaches the stub with its
    explicitly-set fields intact (AIP-4235 auto-population failsafe)."""
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = synonymset_service_request.ListSynonymSetsRequest(
        parent='parent_value',
        page_token='page_token_value',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client.list_synonym_sets(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == synonymset_service_request.ListSynonymSetsRequest(
            parent='parent_value',
            page_token='page_token_value',
        )
+
def test_list_synonym_sets_use_cached_wrapped_rpc():
    """Verify ``list_synonym_sets`` reuses the wrapped RPC cached at client
    creation instead of re-wrapping on every call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.list_synonym_sets in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        client._transport._wrapped_methods[client._transport.list_synonym_sets] = mock_rpc
        request = {}
        client.list_synonym_sets(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        client.list_synonym_sets(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_list_synonym_sets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
    """Async variant: ``list_synonym_sets`` reuses the wrapped RPC cached at
    client creation instead of re-wrapping per call."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = SynonymSetServiceAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._client._transport.list_synonym_sets in client._client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[client._client._transport.list_synonym_sets] = mock_rpc

        request = {}
        await client.list_synonym_sets(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        await client.list_synonym_sets(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2
+
@pytest.mark.asyncio
async def test_list_synonym_sets_async(transport: str = 'grpc_asyncio', request_type=synonymset_service_request.ListSynonymSetsRequest):
    """Verify ListSynonymSets (async) coerces an empty request, calls the
    stub, and returns an async pager exposing ``next_page_token``."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        # (PEP 8 E225 fixed: the generated code was missing the space
        # after `=`.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset_service_request.ListSynonymSetsResponse(
            next_page_token='next_page_token_value',
        ))
        response = await client.list_synonym_sets(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = synonymset_service_request.ListSynonymSetsRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListSynonymSetsAsyncPager)
    assert response.next_page_token == 'next_page_token_value'
+
+
@pytest.mark.asyncio
async def test_list_synonym_sets_async_from_dict():
    """Re-run the async ListSynonymSets test with a dict request."""
    await test_list_synonym_sets_async(request_type=dict)
+
def test_list_synonym_sets_field_headers():
    """Verify ListSynonymSets sends the ``parent`` field as an
    ``x-goog-request-params`` routing header."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = synonymset_service_request.ListSynonymSetsRequest()

    request.parent = 'parent_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        call.return_value = synonymset_service_request.ListSynonymSetsResponse()
        client.list_synonym_sets(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in kw['metadata']
+
+
@pytest.mark.asyncio
async def test_list_synonym_sets_field_headers_async():
    """Async variant: ListSynonymSets sends the ``parent`` field as an
    ``x-goog-request-params`` routing header."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = synonymset_service_request.ListSynonymSetsRequest()

    request.parent = 'parent_value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset_service_request.ListSynonymSetsResponse())
        await client.list_synonym_sets(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent_value',
    ) in kw['metadata']
+
+
def test_list_synonym_sets_flattened():
    """Verify the flattened ``parent`` keyword argument to
    ``list_synonym_sets`` is copied into the generated request."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = synonymset_service_request.ListSynonymSetsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_synonym_sets(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
+
+
def test_list_synonym_sets_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    creds = ga_credentials.AnonymousCredentials()
    client = SynonymSetServiceClient(credentials=creds)

    # Supplying both an explicit request message and a flattened keyword
    # argument is ambiguous, so the client rejects the combination.
    explicit_request = synonymset_service_request.ListSynonymSetsRequest()
    with pytest.raises(ValueError):
        client.list_synonym_sets(
            explicit_request,
            parent='parent_value',
        )
+
@pytest.mark.asyncio
async def test_list_synonym_sets_flattened_async():
    """Verify ListSynonymSets (async) copies the flattened ``parent`` keyword
    argument into the generated request."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        # Designate an appropriate return value for the call.  A single
        # assignment suffices; the generated code previously set
        # call.return_value twice, the first value being dead.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset_service_request.ListSynonymSetsResponse())
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_synonym_sets(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = 'parent_value'
        assert arg == mock_val
+
@pytest.mark.asyncio
async def test_list_synonym_sets_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials())

    # An explicit request message combined with a flattened keyword
    # argument is ambiguous and must be rejected.
    explicit_request = synonymset_service_request.ListSynonymSetsRequest()
    with pytest.raises(ValueError):
        await client.list_synonym_sets(
            explicit_request,
            parent='parent_value',
        )
+
+
def test_list_synonym_sets_pager(transport_name: str = "grpc"):
    """Verify the ListSynonymSets pager walks all pages (6 items across 4
    mocked responses) and forwards retry/timeout/metadata to each page
    request."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        # Set the response to a series of pages.
        # The trailing RuntimeError guards against the pager requesting a
        # fifth page: the last response has no next_page_token.
        call.side_effect = (
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                ],
                next_page_token='abc',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[],
                next_page_token='def',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                ],
                next_page_token='ghi',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                ],
            ),
            RuntimeError,
        )

        expected_metadata = ()
        retry = retries.Retry()
        timeout = 5
        # The empty request implies an empty routing header for `parent`.
        expected_metadata = tuple(expected_metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.list_synonym_sets(request={}, retry=retry, timeout=timeout)

        assert pager._metadata == expected_metadata
        assert pager._retry == retry
        assert pager._timeout == timeout

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, synonymset.SynonymSet)
                   for i in results)
def test_list_synonym_sets_pages(transport_name: str = "grpc"):
    """Verify ``pages`` iteration yields raw pages in order, each carrying
    the mocked ``next_page_token``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                ],
                next_page_token='abc',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[],
                next_page_token='def',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                ],
                next_page_token='ghi',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_synonym_sets(request={}).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
+
@pytest.mark.asyncio
async def test_list_synonym_sets_async_pager():
    """Verify the async ListSynonymSets pager walks all pages (6 items
    across 4 mocked responses) via ``async for``."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                ],
                next_page_token='abc',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[],
                next_page_token='def',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                ],
                next_page_token='ghi',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_synonym_sets(request={},)
        assert async_pager.next_page_token == 'abc'
        responses = []
        async for response in async_pager: # pragma: no branch
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, synonymset.SynonymSet)
                for i in responses)
+
+
@pytest.mark.asyncio
async def test_list_synonym_sets_async_pages():
    """Each raw page's next_page_token should match the configured sequence."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_synonym_sets),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Build a page with `count` empty items and an optional continuation token.
        def _page(count, token=''):
            return synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[synonymset.SynonymSet() for _ in range(count)],
                next_page_token=token,
            )

        # Pages of 3 / 0 / 1 / 2 items, then an error if iteration over-reads.
        call.side_effect = (
            _page(3, 'abc'),
            _page(0, 'def'),
            _page(1, 'ghi'),
            _page(2),
            RuntimeError,
        )
        pages = []
        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
        async for page_ in ( # pragma: no branch
            await client.list_synonym_sets(request={})
        ).pages:
            pages.append(page_)
        # The final page carries the proto default token, i.e. ''.
        for page_, expected_token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == expected_token
+
+
def test_create_synonym_set_rest_use_cached_wrapped_rpc():
    """CreateSynonymSet must reuse the wrapped RPC cached at client creation
    instead of re-wrapping the method on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Client construction wraps every RPC up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        transport = client._transport
        # The method must already be present in the wrapped-method cache.
        assert transport.create_synonym_set in transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations are observable.
        stub = mock.Mock()
        stub.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.create_synonym_set] = stub

        request = {}
        client.create_synonym_set(request)
        assert stub.call_count == 1

        # A second call must hit the cache rather than wrapping again.
        client.create_synonym_set(request)
        assert wrapper_fn.call_count == 0
        assert stub.call_count == 2
+
+
def test_create_synonym_set_rest_required_fields(request_type=synonymset_service_request.CreateSynonymSetRequest):
    """Verify required-field handling for CreateSynonymSet over REST.

    Default-valued required fields must be dropped from the transcoded JSON,
    re-surfaced by ``_get_unset_required_fields``, and a fully-populated
    request must reach the transport carrying only the expected query params.
    """
    transport_class = transports.SynonymSetServiceRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_synonym_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_synonym_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = synonymset.SynonymSet()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = synonymset.SynonymSet.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.create_synonym_set(request)

            # Only the JSON alt parameter should be sent; no request fields
            # may leak into the query string for this POST method.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_create_synonym_set_rest_unset_required_fields():
    """CreateSynonymSet's transcoder should report no unset optional query params.

    The intersection of the method's optional query parameters (none) with its
    required fields ("parent", "synonymSet") must be empty.
    """
    # Fix: instantiate AnonymousCredentials instead of passing the class
    # itself, matching how credentials are supplied everywhere else in
    # this file.
    transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    unset_fields = transport.create_synonym_set._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("parent", "synonymSet", )))
+
+
def test_create_synonym_set_rest_flattened():
    """Flattened args must be merged into the request and sent to the
    collection URL that matches the http rule."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Fake a 200 response carrying an empty SynonymSet payload.
        response_value = Response()
        response_value.status_code = 200
        payload = synonymset.SynonymSet.pb(synonymset.SynonymSet())
        response_value._content = json_format.MessageToJson(payload).encode('UTF-8')
        req.return_value = response_value

        # Flattened fields, with a parent that satisfies the http rule.
        client.create_synonym_set(
            parent='projects/sample1/locations/sample2',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )

        # Exactly one HTTP call, aimed at the expected collection URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/synonymSets" % client.transport._host, args[1])
+
+
def test_create_synonym_set_rest_flattened_error(transport: str = 'rest'):
    """Supplying a request object together with flattened fields must raise."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = synonymset_service_request.CreateSynonymSetRequest()
    with pytest.raises(ValueError):
        # Mixing the request object with flattened arguments is rejected.
        client.create_synonym_set(
            request,
            parent='parent_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )
+
+
def test_get_synonym_set_rest_use_cached_wrapped_rpc():
    """GetSynonymSet must reuse the wrapped RPC cached at client creation
    instead of re-wrapping the method on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Client construction wraps every RPC up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        transport = client._transport
        # The method must already be present in the wrapped-method cache.
        assert transport.get_synonym_set in transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations are observable.
        stub = mock.Mock()
        stub.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.get_synonym_set] = stub

        request = {}
        client.get_synonym_set(request)
        assert stub.call_count == 1

        # A second call must hit the cache rather than wrapping again.
        client.get_synonym_set(request)
        assert wrapper_fn.call_count == 0
        assert stub.call_count == 2
+
+
def test_get_synonym_set_rest_required_fields(request_type=synonymset_service_request.GetSynonymSetRequest):
    """Verify required-field handling for GetSynonymSet over REST.

    Default-valued required fields must be dropped from the transcoded JSON,
    re-surfaced by ``_get_unset_required_fields``, and a fully-populated
    request must reach the transport carrying only the expected query params.
    """
    transport_class = transports.SynonymSetServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_synonym_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_synonym_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = synonymset.SynonymSet()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = synonymset.SynonymSet.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get_synonym_set(request)

            # Only the JSON alt parameter should be sent; no request fields
            # may leak into the query string for this GET method.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_get_synonym_set_rest_unset_required_fields():
    """GetSynonymSet's transcoder should report no unset optional query params.

    The intersection of the method's optional query parameters (none) with its
    required fields ("name") must be empty.
    """
    # Fix: instantiate AnonymousCredentials instead of passing the class
    # itself, matching how credentials are supplied everywhere else in
    # this file.
    transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    unset_fields = transport.get_synonym_set._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
def test_get_synonym_set_rest_flattened():
    """A flattened `name` argument must be routed to the matching resource URL."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Fake a 200 response carrying an empty SynonymSet payload.
        response_value = Response()
        response_value.status_code = 200
        payload = synonymset.SynonymSet.pb(synonymset.SynonymSet())
        response_value._content = json_format.MessageToJson(payload).encode('UTF-8')
        req.return_value = response_value

        # A flattened `name` that satisfies the http rule for this method.
        client.get_synonym_set(
            name='projects/sample1/locations/sample2/synonymSets/sample3',
        )

        # Exactly one HTTP call, aimed at the expected resource URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{name=projects/*/locations/*/synonymSets/*}" % client.transport._host, args[1])
+
+
def test_get_synonym_set_rest_flattened_error(transport: str = 'rest'):
    """Supplying a request object together with flattened fields must raise."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = synonymset_service_request.GetSynonymSetRequest()
    with pytest.raises(ValueError):
        # Mixing the request object with flattened arguments is rejected.
        client.get_synonym_set(
            request,
            name='name_value',
        )
+
+
def test_update_synonym_set_rest_use_cached_wrapped_rpc():
    """UpdateSynonymSet must reuse the wrapped RPC cached at client creation
    instead of re-wrapping the method on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Client construction wraps every RPC up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        transport = client._transport
        # The method must already be present in the wrapped-method cache.
        assert transport.update_synonym_set in transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations are observable.
        stub = mock.Mock()
        stub.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.update_synonym_set] = stub

        request = {}
        client.update_synonym_set(request)
        assert stub.call_count == 1

        # A second call must hit the cache rather than wrapping again.
        client.update_synonym_set(request)
        assert wrapper_fn.call_count == 0
        assert stub.call_count == 2
+
+
def test_update_synonym_set_rest_required_fields(request_type=synonymset_service_request.UpdateSynonymSetRequest):
    """Verify required-field handling for UpdateSynonymSet over REST.

    Default-valued required fields must be dropped from the transcoded JSON,
    re-surfaced by ``_get_unset_required_fields``, and a fully-populated
    request must reach the transport carrying only the expected query params.
    """
    transport_class = transports.SynonymSetServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_synonym_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_synonym_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = synonymset.SynonymSet()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "patch",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = synonymset.SynonymSet.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.update_synonym_set(request)

            # Only the JSON alt parameter should be sent; no request fields
            # may leak into the query string for this PATCH method.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_update_synonym_set_rest_unset_required_fields():
    """UpdateSynonymSet's transcoder should report no unset optional query params.

    The intersection of the method's optional query parameters (none) with its
    required fields ("name", "synonymSet") must be empty.
    """
    # Fix: instantiate AnonymousCredentials instead of passing the class
    # itself, matching how credentials are supplied everywhere else in
    # this file.
    transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    unset_fields = transport.update_synonym_set._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", "synonymSet", )))
+
+
def test_update_synonym_set_rest_flattened():
    """Flattened args must be merged into the request and sent to the
    resource URL that matches the http rule."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Fake a 200 response carrying an empty SynonymSet payload.
        response_value = Response()
        response_value.status_code = 200
        payload = synonymset.SynonymSet.pb(synonymset.SynonymSet())
        response_value._content = json_format.MessageToJson(payload).encode('UTF-8')
        req.return_value = response_value

        # Flattened fields, with a name that satisfies the http rule.
        client.update_synonym_set(
            name='projects/sample1/locations/sample2/synonymSets/sample3',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )

        # Exactly one HTTP call, aimed at the expected resource URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{name=projects/*/locations/*/synonymSets/*}" % client.transport._host, args[1])
+
+
def test_update_synonym_set_rest_flattened_error(transport: str = 'rest'):
    """Supplying a request object together with flattened fields must raise."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = synonymset_service_request.UpdateSynonymSetRequest()
    with pytest.raises(ValueError):
        # Mixing the request object with flattened arguments is rejected.
        client.update_synonym_set(
            request,
            name='name_value',
            synonym_set=synonymset.SynonymSet(name='name_value'),
        )
+
+
def test_delete_synonym_set_rest_use_cached_wrapped_rpc():
    """DeleteSynonymSet must reuse the wrapped RPC cached at client creation
    instead of re-wrapping the method on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Client construction wraps every RPC up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        transport = client._transport
        # The method must already be present in the wrapped-method cache.
        assert transport.delete_synonym_set in transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations are observable.
        stub = mock.Mock()
        stub.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.delete_synonym_set] = stub

        request = {}
        client.delete_synonym_set(request)
        assert stub.call_count == 1

        # A second call must hit the cache rather than wrapping again.
        client.delete_synonym_set(request)
        assert wrapper_fn.call_count == 0
        assert stub.call_count == 2
+
+
def test_delete_synonym_set_rest_required_fields(request_type=synonymset_service_request.DeleteSynonymSetRequest):
    """Verify required-field handling for DeleteSynonymSet over REST.

    Default-valued required fields must be dropped from the transcoded JSON,
    re-surfaced by ``_get_unset_required_fields``, and a fully-populated
    request must reach the transport carrying only the expected query params.
    """
    transport_class = transports.SynonymSetServiceRestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_synonym_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_synonym_set._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    # DeleteSynonymSet returns Empty, so there is no payload to fake.
    return_value = None
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = ''

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete_synonym_set(request)

            # Only the JSON alt parameter should be sent; no request fields
            # may leak into the query string for this DELETE method.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_delete_synonym_set_rest_unset_required_fields():
    """DeleteSynonymSet's transcoder should report no unset optional query params.

    The intersection of the method's optional query parameters (none) with its
    required fields ("name") must be empty.
    """
    # Fix: instantiate AnonymousCredentials instead of passing the class
    # itself, matching how credentials are supplied everywhere else in
    # this file.
    transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    unset_fields = transport.delete_synonym_set._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
def test_delete_synonym_set_rest_flattened():
    """A flattened `name` argument must be routed to the matching resource URL."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Fake a 200 response; DeleteSynonymSet returns an empty body.
        response_value = Response()
        response_value.status_code = 200
        response_value._content = b''
        req.return_value = response_value

        # A flattened `name` that satisfies the http rule for this method.
        client.delete_synonym_set(
            name='projects/sample1/locations/sample2/synonymSets/sample3',
        )

        # Exactly one HTTP call, aimed at the expected resource URL.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v1/{name=projects/*/locations/*/synonymSets/*}" % client.transport._host, args[1])
+
+
def test_delete_synonym_set_rest_flattened_error(transport: str = 'rest'):
    """Supplying a request object together with flattened fields must raise."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = synonymset_service_request.DeleteSynonymSetRequest()
    with pytest.raises(ValueError):
        # Mixing the request object with flattened arguments is rejected.
        client.delete_synonym_set(
            request,
            name='name_value',
        )
+
+
def test_list_synonym_sets_rest_use_cached_wrapped_rpc():
    """ListSynonymSets must reuse the wrapped RPC cached at client creation
    instead of re-wrapping the method on every call."""
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )

        # Client construction wraps every RPC up front.
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        transport = client._transport
        # The method must already be present in the wrapped-method cache.
        assert transport.list_synonym_sets in transport._wrapped_methods

        # Swap the cached wrapper for a mock so invocations are observable.
        stub = mock.Mock()
        stub.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
        transport._wrapped_methods[transport.list_synonym_sets] = stub

        request = {}
        client.list_synonym_sets(request)
        assert stub.call_count == 1

        # A second call must hit the cache rather than wrapping again.
        client.list_synonym_sets(request)
        assert wrapper_fn.call_count == 0
        assert stub.call_count == 2
+
+
def test_list_synonym_sets_rest_required_fields(request_type=synonymset_service_request.ListSynonymSetsRequest):
    """Verify required-field handling for the REST list_synonym_sets method.

    Checks three things in sequence:
      1. fields left at their default value are dropped from the JSON request,
      2. the set of unset-but-settable fields only contains query parameters
         (never path/body params),
      3. a request with all required fields populated succeeds and sends only
         the expected system query parameters.
    """
    transport_class = transports.SynonymSetServiceRestTransport

    # Build a request whose required 'parent' field is the default (empty).
    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_synonym_sets._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_synonym_sets._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("page_size", "page_token", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = synonymset_service_request.ListSynonymSetsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            # Convert return value to protobuf type
            return_value = synonymset_service_request.ListSynonymSetsResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_synonym_sets(request)

            # Only the standard system parameter should be sent; any other
            # query param would indicate leakage of path/body fields.
            expected_params = [
                ('$alt', 'json;enum-encoding=int')
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params
+
+
def test_list_synonym_sets_rest_unset_required_fields():
    """Check the unset-required-fields computation for list_synonym_sets.

    The expected value is the intersection of the method's optional query
    parameters and its required fields (empty here, since 'parent' is a
    path parameter, not a query parameter).
    """
    transport = transports.SynonymSetServiceRestTransport(
        # Instantiate the credentials object; the original passed the class
        # itself, which is inconsistent with every other use in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )

    unset_fields = transport.list_synonym_sets._get_unset_required_fields({})
    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
def test_list_synonym_sets_rest_flattened():
    """A flattened-argument REST call hits the expected URI template."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Fake the HTTP layer so no real request goes out.
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        # Canned protobuf response, serialized the way the server would send it.
        canned = synonymset_service_request.ListSynonymSetsResponse()
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = json_format.MessageToJson(
            synonymset_service_request.ListSynonymSetsResponse.pb(canned)
        ).encode('UTF-8')
        http_call.return_value = fake_response

        # Start from a truthy flattened field, then overlay arguments that
        # satisfy the http rule for this method.
        call_kwargs = dict(
            parent='parent_value',
        )
        call_kwargs.update({'parent': 'projects/sample1/locations/sample2'})

        client.list_synonym_sets(**call_kwargs)

        # Exactly one HTTP request, aimed at the documented path template.
        assert len(http_call.mock_calls) == 1
        _, positional, _ = http_call.mock_calls[0]
        assert path_template.validate(
            "%s/v1/{parent=projects/*/locations/*}/synonymSets" % client.transport._host,
            positional[1])
+
+
def test_list_synonym_sets_rest_flattened_error(transport: str = 'rest'):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Supplying both styles at once is ambiguous; the client rejects it.
    explicit_request = synonymset_service_request.ListSynonymSetsRequest()
    with pytest.raises(ValueError):
        client.list_synonym_sets(
            explicit_request,
            parent='parent_value',
        )
+
+
def test_list_synonym_sets_rest_pager(transport: str = 'rest'):
    """The REST pager walks every page and exposes per-page tokens."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    with mock.patch.object(Session, 'request') as http_call:
        # Four pages: 3 items, 0 items, 1 item, then a final 2-item page
        # (the last page carries no next_page_token).
        pages = (
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                ],
                next_page_token='abc',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[],
                next_page_token='def',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                ],
                next_page_token='ghi',
            ),
            synonymset_service_request.ListSynonymSetsResponse(
                synonym_sets=[
                    synonymset.SynonymSet(),
                    synonymset.SynonymSet(),
                ],
            ),
        )
        # The method is paged through twice below, so serve the series twice.
        pages = pages + pages

        # Serialize each page into a fake HTTP response object.
        payloads = tuple(
            synonymset_service_request.ListSynonymSetsResponse.to_json(page)
            for page in pages
        )
        fake_responses = tuple(Response() for _ in payloads)
        for fake, payload in zip(fake_responses, payloads):
            fake._content = payload.encode('UTF-8')
            fake.status_code = 200
        http_call.side_effect = fake_responses

        sample_request = {'parent': 'projects/sample1/locations/sample2'}

        pager = client.list_synonym_sets(request=sample_request)

        # Iterating the pager yields every item across all pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(item, synonymset.SynonymSet) for item in results)

        # Page-level iteration surfaces the raw next_page_token of each page.
        page_list = list(client.list_synonym_sets(request=sample_request).pages)
        for page_, token in zip(page_list, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
+
+
def test_credentials_transport_error():
    """Each mutually-exclusive combination of client options must raise."""
    # Credentials plus a ready-made transport instance: not allowed.
    grpc_transport = transports.SynonymSetServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        SynonymSetServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=grpc_transport,
        )

    # A credentials file plus a transport instance: not allowed.
    grpc_transport = transports.SynonymSetServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        SynonymSetServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=grpc_transport,
        )

    # An api_key plus a transport instance: not allowed.
    grpc_transport = transports.SynonymSetServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    key_options = client_options.ClientOptions()
    key_options.api_key = "api_key"
    with pytest.raises(ValueError):
        SynonymSetServiceClient(
            client_options=key_options,
            transport=grpc_transport,
        )

    # An api_key plus explicit credentials: not allowed.
    key_options = client_options.ClientOptions()
    key_options.api_key = "api_key"
    with pytest.raises(ValueError):
        SynonymSetServiceClient(
            client_options=key_options,
            credentials=ga_credentials.AnonymousCredentials()
        )

    # Scopes plus a transport instance: not allowed.
    grpc_transport = transports.SynonymSetServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        SynonymSetServiceClient(
            client_options={"scopes": ["1", "2"]},
            transport=grpc_transport,
        )
+
+
def test_transport_instance():
    """A pre-built transport instance is adopted by the client verbatim."""
    custom_transport = transports.SynonymSetServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = SynonymSetServiceClient(transport=custom_transport)
    # Identity, not equality: the client must hold the same object.
    assert client.transport is custom_transport
+
def test_transport_get_channel():
    """Both sync and async gRPC transports expose a usable channel."""
    for transport_cls in (
        transports.SynonymSetServiceGrpcTransport,
        transports.SynonymSetServiceGrpcAsyncIOTransport,
    ):
        custom_transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        assert custom_transport.grpc_channel
+
@pytest.mark.parametrize("transport_class", [
    transports.SynonymSetServiceGrpcTransport,
    transports.SynonymSetServiceGrpcAsyncIOTransport,
    transports.SynonymSetServiceRestTransport,
])
def test_transport_adc(transport_class):
    """Transports fall back to application default credentials (ADC)."""
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        # Constructing without explicit credentials must consult ADC once.
        transport_class()
        adc.assert_called_once()
+
def test_transport_kind_grpc():
    """The transport resolved for 'grpc' reports kind == 'grpc'."""
    transport_cls = SynonymSetServiceClient.get_transport_class("grpc")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "grpc"
+
+
def test_initialize_client_w_grpc():
    """Client construction with the grpc transport string succeeds."""
    grpc_client = SynonymSetServiceClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert grpc_client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_create_synonym_set_empty_call_grpc():
    """request=None over grpc sends a default CreateSynonymSetRequest."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Intercept the wire-level stub and hand back a canned response.
    with mock.patch.object(
            type(client.transport.create_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        client.create_synonym_set(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.CreateSynonymSetRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_get_synonym_set_empty_call_grpc():
    """request=None over grpc sends a default GetSynonymSetRequest."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Intercept the wire-level stub and hand back a canned response.
    with mock.patch.object(
            type(client.transport.get_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        client.get_synonym_set(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.GetSynonymSetRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_update_synonym_set_empty_call_grpc():
    """request=None over grpc sends a default UpdateSynonymSetRequest."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Intercept the wire-level stub and hand back a canned response.
    with mock.patch.object(
            type(client.transport.update_synonym_set), '__call__') as stub:
        stub.return_value = synonymset.SynonymSet()
        client.update_synonym_set(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.UpdateSynonymSetRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_delete_synonym_set_empty_call_grpc():
    """request=None over grpc sends a default DeleteSynonymSetRequest."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Intercept the wire-level stub; delete returns nothing, so fake None.
    with mock.patch.object(
            type(client.transport.delete_synonym_set), '__call__') as stub:
        stub.return_value = None
        client.delete_synonym_set(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.DeleteSynonymSetRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
def test_list_synonym_sets_empty_call_grpc():
    """request=None over grpc sends a default ListSynonymSetsRequest."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Intercept the wire-level stub and hand back a canned response.
    with mock.patch.object(
            type(client.transport.list_synonym_sets), '__call__') as stub:
        stub.return_value = synonymset_service_request.ListSynonymSetsResponse()
        client.list_synonym_sets(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.ListSynonymSetsRequest()
+
+
def test_transport_kind_grpc_asyncio():
    """The transport resolved for 'grpc_asyncio' reports the matching kind."""
    transport_cls = SynonymSetServiceAsyncClient.get_transport_class("grpc_asyncio")
    transport = transport_cls(credentials=async_anonymous_credentials())
    assert transport.kind == "grpc_asyncio"
+
+
def test_initialize_client_w_grpc_asyncio():
    """Async client construction with grpc_asyncio transport succeeds."""
    async_client = SynonymSetServiceAsyncClient(
        transport="grpc_asyncio",
        credentials=async_anonymous_credentials(),
    )
    assert async_client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_create_synonym_set_empty_call_grpc_asyncio():
    """request=None over grpc_asyncio sends a default CreateSynonymSetRequest."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Intercept the wire-level stub; async stubs must return an awaitable.
    with mock.patch.object(
            type(client.transport.create_synonym_set), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet(
            name='name_value',
            context='context_value',
        ))
        await client.create_synonym_set(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.CreateSynonymSetRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_get_synonym_set_empty_call_grpc_asyncio():
    """request=None over grpc_asyncio sends a default GetSynonymSetRequest."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Intercept the wire-level stub; async stubs must return an awaitable.
    with mock.patch.object(
            type(client.transport.get_synonym_set), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet(
            name='name_value',
            context='context_value',
        ))
        await client.get_synonym_set(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.GetSynonymSetRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_update_synonym_set_empty_call_grpc_asyncio():
    """request=None over grpc_asyncio sends a default UpdateSynonymSetRequest."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Intercept the wire-level stub; async stubs must return an awaitable.
    with mock.patch.object(
            type(client.transport.update_synonym_set), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset.SynonymSet(
            name='name_value',
            context='context_value',
        ))
        await client.update_synonym_set(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.UpdateSynonymSetRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_delete_synonym_set_empty_call_grpc_asyncio():
    """request=None over grpc_asyncio sends a default DeleteSynonymSetRequest."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Intercept the wire-level stub; delete has no payload, so fake None.
    with mock.patch.object(
            type(client.transport.delete_synonym_set), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_synonym_set(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.DeleteSynonymSetRequest()
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_list_synonym_sets_empty_call_grpc_asyncio():
    """request=None over grpc_asyncio sends a default ListSynonymSetsRequest."""
    client = SynonymSetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Intercept the wire-level stub; async stubs must return an awaitable.
    with mock.patch.object(
            type(client.transport.list_synonym_sets), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(synonymset_service_request.ListSynonymSetsResponse(
            next_page_token='next_page_token_value',
        ))
        await client.list_synonym_sets(request=None)

        # The stub must have received a default-constructed request proto.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.ListSynonymSetsRequest()
+
+
def test_transport_kind_rest():
    """The transport resolved for 'rest' reports kind == 'rest'."""
    transport_cls = SynonymSetServiceClient.get_transport_class("rest")
    transport = transport_cls(credentials=ga_credentials.AnonymousCredentials())
    assert transport.kind == "rest"
+
+
def test_create_synonym_set_rest_bad_request(request_type=synonymset_service_request.CreateSynonymSetRequest):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # A request that satisfies URI transcoding for this method.
    request = request_type(**{'parent': 'projects/sample1/locations/sample2'})

    # Fake an HTTP 400 and assert that the typed exception propagates.
    with mock.patch.object(Session, 'request') as http_call, \
            pytest.raises(core_exceptions.BadRequest):
        error_response = mock.Mock()
        error_response.status_code = 400
        error_response.json = mock.Mock(return_value={})
        error_response.request = mock.Mock()
        http_call.return_value = error_response
        client.create_synonym_set(request)
+
+
@pytest.mark.parametrize("request_type", [
  synonymset_service_request.CreateSynonymSetRequest,
  dict,
])
def test_create_synonym_set_rest_call_success(request_type):
    """Verify a successful REST create_synonym_set call parses the response.

    Before sending, the sample request body is pruned of any sub-fields that
    the runtime version of the generated types does not know about, so the
    test remains valid when the installed dependency version differs from
    the one used at generation time.
    """
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request_init["synonym_set"] = {'name': 'name_value', 'context': 'context_value', 'synonyms': [{'words': ['words_value1', 'words_value2']}]}
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = synonymset_service_request.CreateSynonymSetRequest.meta.fields["synonym_set"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            # proto-plus messages have no DESCRIPTOR attribute; raw protobuf
            # messages do, and their fields are accessed differently below.
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else: # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    # All (field, subfield) name pairs that exist in the runtime message type.
    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["synonym_set"].items(): # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                # Repeated fields: strip the unknown subfield from every entry.
                for i in range(0, len(request_init["synonym_set"][field])):
                    del request_init["synonym_set"][field][i][subfield]
            else:
                del request_init["synonym_set"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = synonymset.SynonymSet(
              name='name_value',
              context='context_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = synonymset.SynonymSet.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.create_synonym_set(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, synonymset.SynonymSet)
    assert response.name == 'name_value'
    assert response.context == 'context_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_synonym_set_rest_interceptors(null_interceptor):
    """Pre/post REST interceptors fire exactly once around create_synonym_set."""
    rest_transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.SynonymSetServiceRestInterceptor(),
        )
    client = SynonymSetServiceClient(transport=rest_transport)

    with mock.patch.object(type(client.transport._session), "request") as http_call, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "post_create_synonym_set") as post, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "pre_create_synonym_set") as pre:
        # Nothing should have fired before the call is made.
        pre.assert_not_called()
        post.assert_not_called()

        # Bypass real transcoding with a minimal, valid result.
        pb_message = synonymset_service_request.CreateSynonymSetRequest.pb(
            synonymset_service_request.CreateSynonymSetRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Canned 200 response carrying a serialized SynonymSet.
        http_call.return_value = mock.Mock()
        http_call.return_value.status_code = 200
        http_call.return_value.content = synonymset.SynonymSet.to_json(
            synonymset.SynonymSet())

        request = synonymset_service_request.CreateSynonymSetRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = synonymset.SynonymSet()

        client.create_synonym_set(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_synonym_set_rest_bad_request(request_type=synonymset_service_request.GetSynonymSetRequest):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # A request that satisfies URI transcoding for this method.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/synonymSets/sample3'})

    # Fake an HTTP 400 and assert that the typed exception propagates.
    with mock.patch.object(Session, 'request') as http_call, \
            pytest.raises(core_exceptions.BadRequest):
        error_response = mock.Mock()
        error_response.status_code = 400
        error_response.json = mock.Mock(return_value={})
        error_response.request = mock.Mock()
        http_call.return_value = error_response
        client.get_synonym_set(request)
+
+
@pytest.mark.parametrize("request_type", [
  synonymset_service_request.GetSynonymSetRequest,
  dict,
])
def test_get_synonym_set_rest_call_success(request_type):
    """A successful REST get_synonym_set round-trips into a typed response."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request that satisfies URI transcoding for this method.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/synonymSets/sample3'})

    # Fake the HTTP layer with a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as http_call:
        expected = synonymset.SynonymSet(
            name='name_value',
            context='context_value',
        )
        ok_response = mock.Mock()
        ok_response.status_code = 200
        # Serialize the expected message the way the server would.
        ok_response.content = json_format.MessageToJson(
            synonymset.SynonymSet.pb(expected)).encode('UTF-8')
        http_call.return_value = ok_response
        response = client.get_synonym_set(request)

    # The JSON payload must deserialize back into the typed message.
    assert isinstance(response, synonymset.SynonymSet)
    assert response.name == 'name_value'
    assert response.context == 'context_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_synonym_set_rest_interceptors(null_interceptor):
    """Pre/post REST interceptors fire exactly once around get_synonym_set."""
    rest_transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.SynonymSetServiceRestInterceptor(),
        )
    client = SynonymSetServiceClient(transport=rest_transport)

    with mock.patch.object(type(client.transport._session), "request") as http_call, \
        mock.patch.object(path_template, "transcode") as transcode, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "post_get_synonym_set") as post, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "pre_get_synonym_set") as pre:
        # Nothing should have fired before the call is made.
        pre.assert_not_called()
        post.assert_not_called()

        # Bypass real transcoding with a minimal, valid result.
        pb_message = synonymset_service_request.GetSynonymSetRequest.pb(
            synonymset_service_request.GetSynonymSetRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Canned 200 response carrying a serialized SynonymSet.
        http_call.return_value = mock.Mock()
        http_call.return_value.status_code = 200
        http_call.return_value.content = synonymset.SynonymSet.to_json(
            synonymset.SynonymSet())

        request = synonymset_service_request.GetSynonymSetRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = synonymset.SynonymSet()

        client.get_synonym_set(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_update_synonym_set_rest_bad_request(request_type=synonymset_service_request.UpdateSynonymSetRequest):
    """An HTTP 400 from the REST transport must surface as ``BadRequest``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/synonymSets/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.update_synonym_set(request)
+
+
@pytest.mark.parametrize("request_type", [
  synonymset_service_request.UpdateSynonymSetRequest,
  dict,
])
def test_update_synonym_set_rest_call_success(request_type):
    """A successful ``update_synonym_set`` REST call returns a ``SynonymSet``.

    Before sending, the sample request body is pruned of any subfields that
    do not exist in the protobuf runtime available at test time, so the test
    stays green when the generated sample is newer than the installed
    dependency (see gapic-generator-python#1748).
    """
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/synonymSets/sample3'}
    request_init["synonym_set"] = {'name': 'name_value', 'context': 'context_value', 'synonyms': [{'words': ['words_value1', 'words_value2']}]}
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = synonymset_service_request.UpdateSynonymSetRequest.meta.fields["synonym_set"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else: # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["synonym_set"].items(): # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["synonym_set"][field])):
                    del request_init["synonym_set"][field][i][subfield]
            else:
                del request_init["synonym_set"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = synonymset.SynonymSet(
              name='name_value',
              context='context_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = synonymset.SynonymSet.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.update_synonym_set(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, synonymset.SynonymSet)
    assert response.name == 'name_value'
    assert response.context == 'context_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_synonym_set_rest_interceptors(null_interceptor):
    """Verify the REST interceptor hooks around ``update_synonym_set``.

    The HTTP session and ``path_template.transcode`` are patched so no real
    call happens; ``pre_update_synonym_set`` and ``post_update_synonym_set``
    must each fire exactly once.  Parametrized over a real interceptor and
    ``None`` to cover both wiring paths.
    """
    transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.SynonymSetServiceRestInterceptor(),
        )
    client = SynonymSetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "post_update_synonym_set") as post, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "pre_update_synonym_set") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = synonymset_service_request.UpdateSynonymSetRequest.pb(synonymset_service_request.UpdateSynonymSetRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP response carrying a serialized SynonymSet.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = synonymset.SynonymSet.to_json(synonymset.SynonymSet())
        req.return_value.content = return_value

        request = synonymset_service_request.UpdateSynonymSetRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = synonymset.SynonymSet()

        client.update_synonym_set(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_delete_synonym_set_rest_bad_request(request_type=synonymset_service_request.DeleteSynonymSetRequest):
    """An HTTP 400 from the REST transport must surface as ``BadRequest``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/synonymSets/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.delete_synonym_set(request)
+
+
@pytest.mark.parametrize("request_type", [
  synonymset_service_request.DeleteSynonymSetRequest,
  dict,
])
def test_delete_synonym_set_rest_call_success(request_type):
    """A successful ``delete_synonym_set`` REST call yields ``None``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request shape that the transcoding layer accepts.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/synonymSets/sample3'})

    # Replace the underlying HTTP session with an empty 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as session_request:
        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = ''.encode('UTF-8')
        session_request.return_value = fake_response

        result = client.delete_synonym_set(request)

    # A delete has no response payload.
    assert result is None
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_synonym_set_rest_interceptors(null_interceptor):
    """Verify the REST interceptor hook around ``delete_synonym_set``.

    Delete has no response payload, so only the ``pre_delete_synonym_set``
    hook exists; it must fire exactly once.  Parametrized over a real
    interceptor and ``None`` to cover both wiring paths.
    """
    transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.SynonymSetServiceRestInterceptor(),
        )
    client = SynonymSetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "pre_delete_synonym_set") as pre:
        pre.assert_not_called()
        pb_message = synonymset_service_request.DeleteSynonymSetRequest.pb(synonymset_service_request.DeleteSynonymSetRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful, body-less HTTP response.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200

        request = synonymset_service_request.DeleteSynonymSetRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata

        client.delete_synonym_set(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
+
+
def test_list_synonym_sets_rest_bad_request(request_type=synonymset_service_request.ListSynonymSetsRequest):
    """An HTTP 400 from the REST transport must surface as ``BadRequest``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'parent': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.list_synonym_sets(request)
+
+
@pytest.mark.parametrize("request_type", [
  synonymset_service_request.ListSynonymSetsRequest,
  dict,
])
def test_list_synonym_sets_rest_call_success(request_type):
    """A successful ``list_synonym_sets`` REST call returns a pager."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # A request shape that the transcoding layer accepts.
    request = request_type(**{'parent': 'projects/sample1/locations/sample2'})

    # Replace the underlying HTTP session with a canned 200 response.
    with mock.patch.object(type(client.transport._session), 'request') as session_request:
        expected = synonymset_service_request.ListSynonymSetsResponse(
            next_page_token='next_page_token_value',
        )

        # Serialize the proto-plus message to JSON, as the server would.
        payload = json_format.MessageToJson(
            synonymset_service_request.ListSynonymSetsResponse.pb(expected)
        )

        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = payload.encode('UTF-8')
        session_request.return_value = fake_response

        pager = client.list_synonym_sets(request)

    # The client wraps the response in a pager exposing the page token.
    assert isinstance(pager, pagers.ListSynonymSetsPager)
    assert pager.next_page_token == 'next_page_token_value'
+
+
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_synonym_sets_rest_interceptors(null_interceptor):
    """Verify the REST interceptor hooks around ``list_synonym_sets``.

    The HTTP session and ``path_template.transcode`` are patched so no real
    call happens; ``pre_list_synonym_sets`` and ``post_list_synonym_sets``
    must each fire exactly once.  Parametrized over a real interceptor and
    ``None`` to cover both wiring paths.
    """
    transport = transports.SynonymSetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.SynonymSetServiceRestInterceptor(),
        )
    client = SynonymSetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
        mock.patch.object(path_template, "transcode")  as transcode, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "post_list_synonym_sets") as post, \
        mock.patch.object(transports.SynonymSetServiceRestInterceptor, "pre_list_synonym_sets") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = synonymset_service_request.ListSynonymSetsRequest.pb(synonymset_service_request.ListSynonymSetsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        # Fake a successful HTTP response carrying a serialized list response.
        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = synonymset_service_request.ListSynonymSetsResponse.to_json(synonymset_service_request.ListSynonymSetsResponse())
        req.return_value.content = return_value

        request = synonymset_service_request.ListSynonymSetsRequest()
        metadata =[
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = synonymset_service_request.ListSynonymSetsResponse()

        client.list_synonym_sets(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()
+
+
def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
    """An HTTP 400 on ``get_operation`` must surface as ``BadRequest``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_operation(request)
+
+
@pytest.mark.parametrize("request_type", [
    operations_pb2.GetOperationRequest,
    dict,
])
def test_get_operation_rest(request_type):
    """A successful ``get_operation`` REST call returns an ``Operation``."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request = request_type(**{'name': 'projects/sample1/locations/sample2/operations/sample3'})

    # Replace the HTTP session with a canned 200 carrying an empty Operation.
    with mock.patch.object(Session, 'request') as session_request:
        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = json_format.MessageToJson(
            operations_pb2.Operation()
        ).encode('UTF-8')
        session_request.return_value = fake_response

        result = client.get_operation(request)

    # The client deserializes the body back into an Operation message.
    assert isinstance(result, operations_pb2.Operation)
+
def test_initialize_client_w_rest():
    """Smoke test: a REST-transport client can be constructed."""
    rest_client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    assert rest_client is not None
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_create_synonym_set_empty_call_rest():
    """``create_synonym_set(request=None)`` sends a default empty request."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub the transport-level callable and invoke with no request at all.
    with mock.patch.object(type(client.transport.create_synonym_set), '__call__') as stub:
        client.create_synonym_set(request=None)

        # The stub must receive an empty default request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.CreateSynonymSetRequest()
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_get_synonym_set_empty_call_rest():
    """``get_synonym_set(request=None)`` sends a default empty request."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub the transport-level callable and invoke with no request at all.
    with mock.patch.object(type(client.transport.get_synonym_set), '__call__') as stub:
        client.get_synonym_set(request=None)

        # The stub must receive an empty default request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.GetSynonymSetRequest()
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_update_synonym_set_empty_call_rest():
    """``update_synonym_set(request=None)`` sends a default empty request."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub the transport-level callable and invoke with no request at all.
    with mock.patch.object(type(client.transport.update_synonym_set), '__call__') as stub:
        client.update_synonym_set(request=None)

        # The stub must receive an empty default request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.UpdateSynonymSetRequest()
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_delete_synonym_set_empty_call_rest():
    """``delete_synonym_set(request=None)`` sends a default empty request."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub the transport-level callable and invoke with no request at all.
    with mock.patch.object(type(client.transport.delete_synonym_set), '__call__') as stub:
        client.delete_synonym_set(request=None)

        # The stub must receive an empty default request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.DeleteSynonymSetRequest()
+
+
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_list_synonym_sets_empty_call_rest():
    """``list_synonym_sets(request=None)`` sends a default empty request."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Stub the transport-level callable and invoke with no request at all.
    with mock.patch.object(type(client.transport.list_synonym_sets), '__call__') as stub:
        client.list_synonym_sets(request=None)

        # The stub must receive an empty default request message.
        stub.assert_called()
        _, call_args, _ = stub.mock_calls[0]
        assert call_args[0] == synonymset_service_request.ListSynonymSetsRequest()
+
+
def test_transport_grpc_default():
    """With no transport argument, the client must default to gRPC."""
    default_client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(
        default_client.transport,
        transports.SynonymSetServiceGrpcTransport,
    )
+
def test_synonym_set_service_base_transport_error():
    """Passing both credentials and credentials_file must raise."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.SynonymSetServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
+
+
def test_synonym_set_service_base_transport():
    """Every abstract transport method/property raises NotImplementedError."""
    # Instantiate the base transport with __init__ stubbed out.
    with mock.patch('google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.SynonymSetServiceTransport.__init__') as Transport:
        Transport.return_value = None
        base = transports.SynonymSetServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )

    # Each RPC stub on the base class should just blindly raise.
    for rpc_name in (
        'create_synonym_set',
        'get_synonym_set',
        'update_synonym_set',
        'delete_synonym_set',
        'list_synonym_sets',
        'get_operation',
    ):
        with pytest.raises(NotImplementedError):
            getattr(base, rpc_name)(request=object())

    with pytest.raises(NotImplementedError):
        base.close()

    # Catch-all for the remaining abstract properties.
    for prop_name in ('kind',):
        with pytest.raises(NotImplementedError):
            getattr(base, prop_name)()
+
+
def test_synonym_set_service_base_transport_with_credentials_file():
    """A credentials file is loaded with the expected scopes and quota project."""
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, \
            mock.patch('google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.SynonymSetServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.SynonymSetServiceTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        # The file must be loaded exactly once with the service's default scopes.
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
def test_synonym_set_service_base_transport_with_adc():
    """Without credentials or a credentials file, ADC must be consulted."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, \
            mock.patch('google.cloud.contentwarehouse_v1.services.synonym_set_service.transports.SynonymSetServiceTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.SynonymSetServiceTransport()
        adc.assert_called_once()
+
+
def test_synonym_set_service_auth_adc():
    """Client construction without credentials uses ADC with default scopes."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        SynonymSetServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id=None,
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.SynonymSetServiceGrpcTransport,
        transports.SynonymSetServiceGrpcAsyncIOTransport,
    ],
)
def test_synonym_set_service_transport_auth_adc(transport_class):
    """gRPC transports pass user scopes and quota project through to ADC."""
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
            quota_project_id="octopus",
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.SynonymSetServiceGrpcTransport,
        transports.SynonymSetServiceGrpcAsyncIOTransport,
        transports.SynonymSetServiceRestTransport,
    ],
)
def test_synonym_set_service_transport_auth_gdch_credentials(transport_class):
    """GDC-H credentials get their audience set to ``api_audience`` or the host."""
    host = 'https://language.com'
    cases = [
        (None, host),  # no explicit audience -> host is used
        ('https://language2.com', 'https://language2.com'),  # explicit audience wins
    ]
    for audience_arg, expected_audience in cases:
        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
            gdch_mock = mock.MagicMock()
            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
            adc.return_value = (gdch_mock, None)
            transport_class(host=host, api_audience=audience_arg)
            gdch_mock.with_gdch_audience.assert_called_once_with(expected_audience)
+
+
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.SynonymSetServiceGrpcTransport, grpc_helpers),
        (transports.SynonymSetServiceGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_synonym_set_service_transport_create_channel(transport_class, grpc_helpers):
    """Verify the exact arguments the transport passes to ``create_channel``.

    Both sync and async gRPC transports must forward ADC credentials, the
    service's default host/scopes, and the unlimited message-size options.
    """
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        create_channel.assert_called_with(
            "contentwarehouse.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
),
            scopes=["1", "2"],
            default_host="contentwarehouse.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
+
+
@pytest.mark.parametrize("transport_class", [transports.SynonymSetServiceGrpcTransport, transports.SynonymSetServiceGrpcAsyncIOTransport])
def test_synonym_set_service_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """Verify the two mTLS wiring paths of the gRPC transports.

    First, an explicit ``ssl_channel_credentials`` object must be forwarded
    to ``create_channel`` untouched.  Second, when only a
    ``client_cert_source_for_mtls`` callback is given, the transport must
    build SSL credentials from the cert/key pair it returns.
    """
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )
+
def test_synonym_set_service_http_transport_client_cert_source_for_mtls():
    """REST transport wires ``client_cert_source_for_mtls`` into its session."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as configure_mtls:
        transports.SynonymSetServiceRestTransport(
            credentials=creds,
            client_cert_source_for_mtls=client_cert_source_callback,
        )
        configure_mtls.assert_called_once_with(client_cert_source_callback)
+
+
@pytest.mark.parametrize("transport_name", [
    "grpc",
    "grpc_asyncio",
    "rest",
])
def test_synonym_set_service_host_no_port(transport_name):
    """A port-less endpoint resolves to :443 (gRPC) or an https URL (REST)."""
    client = SynonymSetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com'),
        transport=transport_name,
    )
    if transport_name in ['grpc', 'grpc_asyncio']:
        expected_host = 'contentwarehouse.googleapis.com:443'
    else:
        expected_host = 'https://contentwarehouse.googleapis.com'
    assert client.transport._host == expected_host
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_synonym_set_service_host_with_port(transport_name):
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='contentwarehouse.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'contentwarehouse.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://contentwarehouse.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_synonym_set_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = SynonymSetServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = SynonymSetServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_synonym_set._session
+    session2 = client2.transport.create_synonym_set._session
+    assert session1 != session2
+    session1 = client1.transport.get_synonym_set._session
+    session2 = client2.transport.get_synonym_set._session
+    assert session1 != session2
+    session1 = client1.transport.update_synonym_set._session
+    session2 = client2.transport.update_synonym_set._session
+    assert session1 != session2
+    session1 = client1.transport.delete_synonym_set._session
+    session2 = client2.transport.delete_synonym_set._session
+    assert session1 != session2
+    session1 = client1.transport.list_synonym_sets._session
+    session2 = client2.transport.list_synonym_sets._session
+    assert session1 != session2
+def test_synonym_set_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SynonymSetServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+def test_synonym_set_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SynonymSetServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.SynonymSetServiceGrpcTransport, transports.SynonymSetServiceGrpcAsyncIOTransport])
+def test_synonym_set_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    """The deprecated api_mtls_endpoint/client_cert_source arguments must
+    still build an mTLS channel from the callback's cert/key pair, targeting
+    the mTLS endpoint, and emit a DeprecationWarning."""
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            # No explicit credentials are passed, so the transport must fall
+            # back to ADC (google.auth.default), mocked out here.
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            # The cert/key pair from the callback must feed the SSL credentials.
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            # The channel must target the mTLS endpoint, not the plain host.
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.SynonymSetServiceGrpcTransport, transports.SynonymSetServiceGrpcAsyncIOTransport])
+def test_synonym_set_service_transport_channel_mtls_with_adc(
+    transport_class
+):
+    """With api_mtls_endpoint set but no client_cert_source, the transport
+    must take its SSL credentials from ADC's SslCredentials (mocked here) and
+    still emit a DeprecationWarning for the deprecated arguments."""
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            # The channel must target the mTLS endpoint with the ADC-derived
+            # SSL credentials.
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_location_path():
+    project = "squid"
+    location = "clam"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = SynonymSetServiceClient.location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_location_path():
+    expected = {
+        "project": "whelk",
+        "location": "octopus",
+    }
+    path = SynonymSetServiceClient.location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SynonymSetServiceClient.parse_location_path(path)
+    assert expected == actual
+
+def test_synonym_set_path():
+    project = "oyster"
+    location = "nudibranch"
+    context = "cuttlefish"
+    expected = "projects/{project}/locations/{location}/synonymSets/{context}".format(project=project, location=location, context=context, )
+    actual = SynonymSetServiceClient.synonym_set_path(project, location, context)
+    assert expected == actual
+
+
+def test_parse_synonym_set_path():
+    expected = {
+        "project": "mussel",
+        "location": "winkle",
+        "context": "nautilus",
+    }
+    path = SynonymSetServiceClient.synonym_set_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SynonymSetServiceClient.parse_synonym_set_path(path)
+    assert expected == actual
+
+def test_common_billing_account_path():
+    billing_account = "scallop"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = SynonymSetServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "abalone",
+    }
+    path = SynonymSetServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SynonymSetServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    folder = "squid"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = SynonymSetServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "clam",
+    }
+    path = SynonymSetServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SynonymSetServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "whelk"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = SynonymSetServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "octopus",
+    }
+    path = SynonymSetServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SynonymSetServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "oyster"
+    expected = "projects/{project}".format(project=project, )
+    actual = SynonymSetServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "nudibranch",
+    }
+    path = SynonymSetServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SynonymSetServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "cuttlefish"
+    location = "mussel"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = SynonymSetServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "winkle",
+        "location": "nautilus",
+    }
+    path = SynonymSetServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = SynonymSetServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    """A client_info supplied to either the client or the transport must be
+    handed to the transport's _prep_wrapped_messages hook."""
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    # Passed through the client constructor.
+    with mock.patch.object(transports.SynonymSetServiceTransport, '_prep_wrapped_messages') as prep:
+        client = SynonymSetServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    # Passed directly to the transport constructor.
+    with mock.patch.object(transports.SynonymSetServiceTransport, '_prep_wrapped_messages') as prep:
+        transport_class = SynonymSetServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_get_operation(transport: str = "grpc"):
+    """get_operation must forward the request unchanged to the gRPC stub and
+    return the stub's Operation."""
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+@pytest.mark.asyncio
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
+    """Async variant: get_operation must forward the request unchanged to the
+    asyncio gRPC stub and return the stub's Operation."""
+    client = SynonymSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.GetOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call (awaitable).
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+def test_get_operation_field_headers():
+    """get_operation must attach an x-goog-request-params header derived from
+    the request's name field."""
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = operations_pb2.Operation()
+
+        client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_operation_field_headers_async():
+    """Async variant: get_operation must attach an x-goog-request-params
+    header derived from the request's name field."""
+    client = SynonymSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.GetOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        await client.get_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
+
+def test_get_operation_from_dict():
+    """get_operation must also accept a plain dict in place of a request
+    proto and still reach the stub."""
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+
+        response = client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_operation_from_dict_async():
+    """Async variant: get_operation must also accept a plain dict in place of
+    a request proto and still reach the stub."""
+    client = SynonymSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call (awaitable).
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation()
+        )
+        response = await client.get_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = SynonymSetServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    client = SynonymSetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = SynonymSetServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+@pytest.mark.parametrize("client_class,transport_class", [
+    (SynonymSetServiceClient, transports.SynonymSetServiceGrpcTransport),
+    (SynonymSetServiceAsyncClient, transports.SynonymSetServiceGrpcAsyncIOTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+    """Setting client_options.api_key must route through
+    get_api_key_credentials and hand the resulting credentials to the
+    transport constructor."""
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            # The transport must receive the api-key credentials and the
+            # default universe-domain endpoint.
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )