From de9060af990ef4b31ca30bc2e17f4dd20fabae1b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Feb 2023 15:37:19 -0500 Subject: [PATCH] feat: enable "rest" transport in Python for services supporting numeric enums (#158) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: enable "rest" transport in Python for services supporting numeric enums PiperOrigin-RevId: 508143576 Source-Link: https://github.com/googleapis/googleapis/commit/7a702a989db3b413f39ff8994ca53fb38b6928c2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6ad1279c0e7aa787ac6b66c9fd4a210692edffcd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmFkMTI3OWMwZTdhYTc4N2FjNmI2NmM5ZmQ0YTIxMDY5MmVkZmZjZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../cloud/datastream_v1/gapic_metadata.json | 130 + .../services/datastream/client.py | 2 + .../datastream/transports/__init__.py | 4 + .../services/datastream/transports/rest.py | 4142 ++++++++ .../datastream_v1alpha1/gapic_metadata.json | 110 + .../services/datastream/client.py | 2 + .../datastream/transports/__init__.py | 4 + .../services/datastream/transports/rest.py | 2976 ++++++ .../gapic/datastream_v1/test_datastream.py | 8781 ++++++++++++++++- .../datastream_v1alpha1/test_datastream.py | 6973 ++++++++++++- 10 files changed, 22951 insertions(+), 173 deletions(-) create mode 100644 packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py create mode 100644 packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/rest.py diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_metadata.json b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_metadata.json index 
b6c51175c3ba..e7dc53a5e558 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_metadata.json +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_metadata.json @@ -266,6 +266,136 @@ ] } } + }, + "rest": { + "libraryClient": "DatastreamClient", + "rpcs": { + "CreateConnectionProfile": { + "methods": [ + "create_connection_profile" + ] + }, + "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, + "CreateRoute": { + "methods": [ + "create_route" + ] + }, + "CreateStream": { + "methods": [ + "create_stream" + ] + }, + "DeleteConnectionProfile": { + "methods": [ + "delete_connection_profile" + ] + }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DeleteRoute": { + "methods": [ + "delete_route" + ] + }, + "DeleteStream": { + "methods": [ + "delete_stream" + ] + }, + "DiscoverConnectionProfile": { + "methods": [ + "discover_connection_profile" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, + "GetConnectionProfile": { + "methods": [ + "get_connection_profile" + ] + }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "GetRoute": { + "methods": [ + "get_route" + ] + }, + "GetStream": { + "methods": [ + "get_stream" + ] + }, + "GetStreamObject": { + "methods": [ + "get_stream_object" + ] + }, + "ListConnectionProfiles": { + "methods": [ + "list_connection_profiles" + ] + }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, + "ListRoutes": { + "methods": [ + "list_routes" + ] + }, + "ListStreamObjects": { + "methods": [ + "list_stream_objects" + ] + }, + "ListStreams": { + "methods": [ + "list_streams" + ] + }, + "LookupStreamObject": { + "methods": [ + "lookup_stream_object" + ] + }, + "StartBackfillJob": { + "methods": [ + "start_backfill_job" + ] + }, + "StopBackfillJob": { + "methods": [ + "stop_backfill_job" + ] + }, + "UpdateConnectionProfile": { + "methods": [ + 
"update_connection_profile" + ] + }, + "UpdateStream": { + "methods": [ + "update_stream" + ] + } + } } } } diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py index 0c4758edbcd1..89d11e6e166e 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py @@ -62,6 +62,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DatastreamTransport from .transports.grpc import DatastreamGrpcTransport from .transports.grpc_asyncio import DatastreamGrpcAsyncIOTransport +from .transports.rest import DatastreamRestTransport class DatastreamClientMeta(type): @@ -75,6 +76,7 @@ class DatastreamClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[DatastreamTransport]] _transport_registry["grpc"] = DatastreamGrpcTransport _transport_registry["grpc_asyncio"] = DatastreamGrpcAsyncIOTransport + _transport_registry["rest"] = DatastreamRestTransport def get_transport_class( cls, diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/__init__.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/__init__.py index 064e68286f30..00352319b938 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/__init__.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/__init__.py @@ -19,14 +19,18 @@ from .base import DatastreamTransport from .grpc import DatastreamGrpcTransport from .grpc_asyncio import DatastreamGrpcAsyncIOTransport +from .rest import DatastreamRestInterceptor, DatastreamRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[DatastreamTransport]] _transport_registry["grpc"] = DatastreamGrpcTransport _transport_registry["grpc_asyncio"] = DatastreamGrpcAsyncIOTransport +_transport_registry["rest"] = DatastreamRestTransport __all__ = ( "DatastreamTransport", "DatastreamGrpcTransport", "DatastreamGrpcAsyncIOTransport", + "DatastreamRestTransport", + "DatastreamRestInterceptor", ) diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py new file mode 100644 index 000000000000..ad2551f0c61e --- /dev/null +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py @@ -0,0 +1,4142 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.datastream_v1.types import datastream, datastream_resources + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DatastreamTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DatastreamRestInterceptor: + """Interceptor for Datastream. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DatastreamRestTransport. + + .. code-block:: python + class MyCustomDatastreamInterceptor(DatastreamRestInterceptor): + def pre_create_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_private_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_private_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_route(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_route(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_stream(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_stream(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_private_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_private_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_route(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_delete_route(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_stream(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_stream(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_discover_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_discover_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_static_ips(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_static_ips(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_private_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_private_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_route(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_route(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_stream(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stream(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_stream_object(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stream_object(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_list_connection_profiles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_connection_profiles(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_private_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_private_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_routes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_routes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_stream_objects(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_stream_objects(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_streams(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_streams(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lookup_stream_object(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_lookup_stream_object(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_backfill_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_backfill_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop_backfill_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_backfill_job(self, response): + logging.log(f"Received 
response: {response}") + return response + + def pre_update_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_stream(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_stream(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DatastreamRestTransport(interceptor=MyCustomDatastreamInterceptor()) + client = DatastreamClient(transport=transport) + + + """ + + def pre_create_connection_profile( + self, + request: datastream.CreateConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.CreateConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_create_connection_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_create_private_connection( + self, + request: datastream.CreatePrivateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.CreatePrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_private_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_create_private_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_private_connection + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_create_route( + self, + request: datastream.CreateRouteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.CreateRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_route + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_create_route( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_route + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_create_stream( + self, + request: datastream.CreateStreamRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.CreateStreamRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_stream + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_create_stream( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_stream + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_connection_profile( + self, + request: datastream.DeleteConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DeleteConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_delete_connection_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_delete_private_connection( + self, + request: datastream.DeletePrivateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DeletePrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_private_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_delete_private_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_private_connection + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_delete_route( + self, + request: datastream.DeleteRouteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DeleteRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_route + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_delete_route( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_route + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_delete_stream( + self, + request: datastream.DeleteStreamRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DeleteStreamRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_stream + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_delete_stream( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_stream + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_discover_connection_profile( + self, + request: datastream.DiscoverConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DiscoverConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for discover_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_discover_connection_profile( + self, response: datastream.DiscoverConnectionProfileResponse + ) -> datastream.DiscoverConnectionProfileResponse: + """Post-rpc interceptor for discover_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_fetch_static_ips( + self, + request: datastream.FetchStaticIpsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.FetchStaticIpsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_static_ips + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_fetch_static_ips( + self, response: datastream.FetchStaticIpsResponse + ) -> datastream.FetchStaticIpsResponse: + """Post-rpc interceptor for fetch_static_ips + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_connection_profile( + self, + request: datastream.GetConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.GetConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_connection_profile( + self, response: datastream_resources.ConnectionProfile + ) -> datastream_resources.ConnectionProfile: + """Post-rpc interceptor for get_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_private_connection( + self, + request: datastream.GetPrivateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.GetPrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_private_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_get_private_connection( + self, response: datastream_resources.PrivateConnection + ) -> datastream_resources.PrivateConnection: + """Post-rpc interceptor for get_private_connection + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_route( + self, request: datastream.GetRouteRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastream.GetRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_route + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_route( + self, response: datastream_resources.Route + ) -> datastream_resources.Route: + """Post-rpc interceptor for get_route + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_stream( + self, request: datastream.GetStreamRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastream.GetStreamRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stream + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_stream( + self, response: datastream_resources.Stream + ) -> datastream_resources.Stream: + """Post-rpc interceptor for get_stream + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_get_stream_object( + self, + request: datastream.GetStreamObjectRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.GetStreamObjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stream_object + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_stream_object( + self, response: datastream_resources.StreamObject + ) -> datastream_resources.StreamObject: + """Post-rpc interceptor for get_stream_object + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_connection_profiles( + self, + request: datastream.ListConnectionProfilesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.ListConnectionProfilesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_connection_profiles + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_list_connection_profiles( + self, response: datastream.ListConnectionProfilesResponse + ) -> datastream.ListConnectionProfilesResponse: + """Post-rpc interceptor for list_connection_profiles + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_private_connections( + self, + request: datastream.ListPrivateConnectionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.ListPrivateConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_private_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_list_private_connections( + self, response: datastream.ListPrivateConnectionsResponse + ) -> datastream.ListPrivateConnectionsResponse: + """Post-rpc interceptor for list_private_connections + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_routes( + self, request: datastream.ListRoutesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastream.ListRoutesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_routes + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_list_routes( + self, response: datastream.ListRoutesResponse + ) -> datastream.ListRoutesResponse: + """Post-rpc interceptor for list_routes + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_stream_objects( + self, + request: datastream.ListStreamObjectsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.ListStreamObjectsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_stream_objects + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_list_stream_objects( + self, response: datastream.ListStreamObjectsResponse + ) -> datastream.ListStreamObjectsResponse: + """Post-rpc interceptor for list_stream_objects + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_list_streams( + self, + request: datastream.ListStreamsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.ListStreamsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_streams + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_list_streams( + self, response: datastream.ListStreamsResponse + ) -> datastream.ListStreamsResponse: + """Post-rpc interceptor for list_streams + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_lookup_stream_object( + self, + request: datastream.LookupStreamObjectRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.LookupStreamObjectRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for lookup_stream_object + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_lookup_stream_object( + self, response: datastream_resources.StreamObject + ) -> datastream_resources.StreamObject: + """Post-rpc interceptor for lookup_stream_object + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_start_backfill_job( + self, + request: datastream.StartBackfillJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.StartBackfillJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_backfill_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_start_backfill_job( + self, response: datastream.StartBackfillJobResponse + ) -> datastream.StartBackfillJobResponse: + """Post-rpc interceptor for start_backfill_job + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_stop_backfill_job( + self, + request: datastream.StopBackfillJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.StopBackfillJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_backfill_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_stop_backfill_job( + self, response: datastream.StopBackfillJobResponse + ) -> datastream.StopBackfillJobResponse: + """Post-rpc interceptor for stop_backfill_job + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_update_connection_profile( + self, + request: datastream.UpdateConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.UpdateConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_update_connection_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_update_stream( + self, + request: datastream.UpdateStreamRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.UpdateStreamRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_stream + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_update_stream( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_stream + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.Location: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.GetLocationRequest + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> locations_pb2.ListLocationsResponse: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsRequest + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_cancel_operation( + self, response: operations_pb2.CancelOperationRequest + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> None: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_delete_operation( + self, response: operations_pb2.DeleteOperationRequest + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.Operation: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.GetOperationRequest + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> operations_pb2.ListOperationsResponse: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsRequest + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DatastreamRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DatastreamRestInterceptor + + +class DatastreamRestTransport(DatastreamTransport): + """REST backend transport for Datastream. + + Datastream service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "datastream.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DatastreamRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DatastreamRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
        if self._operations_client is None:
            # HTTP bindings for the google.longrunning.Operations mixin, used
            # by the operations client to poll/cancel the LROs this service
            # returns.
            http_options: Dict[str, List[Dict[str, str]]] = {
                "google.longrunning.Operations.CancelOperation": [
                    {
                        "method": "post",
                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
                        "body": "*",
                    },
                ],
                "google.longrunning.Operations.DeleteOperation": [
                    {
                        "method": "delete",
                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
                    },
                ],
                "google.longrunning.Operations.GetOperation": [
                    {
                        "method": "get",
                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
                    },
                ],
                "google.longrunning.Operations.ListOperations": [
                    {
                        "method": "get",
                        "uri": "/v1/{name=projects/*/locations/*}/operations",
                    },
                ],
            }

            rest_transport = operations_v1.OperationsRestTransport(
                host=self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                scopes=self._scopes,
                http_options=http_options,
                path_prefix="v1",
            )

            self._operations_client = operations_v1.AbstractOperationsClient(
                transport=rest_transport
            )

        # Return the client from cache.
        return self._operations_client

    class _CreateConnectionProfile(DatastreamRestStub):
        def __hash__(self):
            return hash("CreateConnectionProfile")

        # Required query parameters that must always be present on the wire;
        # any of these missing after transcoding is re-added with its default.
        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {
            "connectionProfileId": "",
        }

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            # Return the subset of required defaults not already present in
            # the transcoded query params.
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: datastream.CreateConnectionProfileRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> operations_pb2.Operation:
            r"""Call the create connection profile method over HTTP.

            Args:
                request (~.datastream.CreateConnectionProfileRequest):
                    The request object. Request message for creating a
                connection profile.

                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.operations_pb2.Operation:
                    This resource represents a
                long-running operation that is the
                result of a network API call.

            """

            http_options: List[Dict[str, str]] = [
                {
                    "method": "post",
                    "uri": "/v1/{parent=projects/*/locations/*}/connectionProfiles",
                    "body": "connection_profile",
                },
            ]
            request, metadata = self._interceptor.pre_create_connection_profile(
                request, metadata
            )
            pb_request = datastream.CreateConnectionProfileRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            # Jsonify the request body

            body = json_format.MessageToJson(
                transcoded_request["body"],
                including_default_value_fields=False,
                use_integers_for_enums=True,
            )
            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    including_default_value_fields=False,
                    use_integers_for_enums=True,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            query_params["$alt"] = "json;enum-encoding=int"

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
                data=body,
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_connection_profile(resp) + return resp + + class _CreatePrivateConnection(DatastreamRestStub): + def __hash__(self): + return hash("CreatePrivateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "privateConnectionId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.CreatePrivateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create private connection method over HTTP. + + Args: + request (~.datastream.CreatePrivateConnectionRequest): + The request object. Request for creating a private + connection. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/privateConnections", + "body": "private_connection", + }, + ] + request, metadata = self._interceptor.pre_create_private_connection( + request, metadata + ) + pb_request = datastream.CreatePrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_private_connection(resp) + return resp + + class _CreateRoute(DatastreamRestStub): + def __hash__(self): + return hash("CreateRoute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "routeId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.CreateRouteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create route method over HTTP. + + Args: + request (~.datastream.CreateRouteRequest): + The request object. Route creation request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/privateConnections/*}/routes", + "body": "route", + }, + ] + request, metadata = self._interceptor.pre_create_route(request, metadata) + pb_request = datastream.CreateRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_route(resp) + return resp + + class _CreateStream(DatastreamRestStub): + def __hash__(self): + return hash("CreateStream") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "streamId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.CreateStreamRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create stream method over HTTP. + + Args: + request (~.datastream.CreateStreamRequest): + The request object. Request message for creating a + stream. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/streams", + "body": "stream", + }, + ] + request, metadata = self._interceptor.pre_create_stream(request, metadata) + pb_request = datastream.CreateStreamRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_stream(resp) + return resp + + class _DeleteConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("DeleteConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DeleteConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete connection profile method over HTTP. + + Args: + request (~.datastream.DeleteConnectionProfileRequest): + The request object. Request message for deleting a + connection profile. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/connectionProfiles/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_connection_profile( + request, metadata + ) + pb_request = datastream.DeleteConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_connection_profile(resp) + return resp + + class _DeletePrivateConnection(DatastreamRestStub): + def __hash__(self): + return hash("DeletePrivateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DeletePrivateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete private connection method over HTTP. + + Args: + request (~.datastream.DeletePrivateConnectionRequest): + The request object. Request to delete a private + connection. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/privateConnections/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_private_connection( + request, metadata + ) + pb_request = datastream.DeletePrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_private_connection(resp) + return resp + + class _DeleteRoute(DatastreamRestStub): + def __hash__(self): + return hash("DeleteRoute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DeleteRouteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete route method over HTTP. + + Args: + request (~.datastream.DeleteRouteRequest): + The request object. Route deletion request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/privateConnections/*/routes/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_route(request, metadata) + pb_request = datastream.DeleteRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_route(resp) + return resp + + class _DeleteStream(DatastreamRestStub): + def __hash__(self): + return hash("DeleteStream") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DeleteStreamRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete stream method over HTTP. + + Args: + request (~.datastream.DeleteStreamRequest): + The request object. Request message for deleting a + stream. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/streams/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_stream(request, metadata) + pb_request = datastream.DeleteStreamRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_stream(resp) + return resp + + class _DiscoverConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("DiscoverConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DiscoverConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.DiscoverConnectionProfileResponse: + r"""Call the discover connection + profile method over HTTP. + + Args: + request (~.datastream.DiscoverConnectionProfileRequest): + The request object. Request message for 'discover' + ConnectionProfile request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.DiscoverConnectionProfileResponse: + Response from a discover request. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/connectionProfiles:discover", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_discover_connection_profile( + request, metadata + ) + pb_request = datastream.DiscoverConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.DiscoverConnectionProfileResponse() + pb_resp = datastream.DiscoverConnectionProfileResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_discover_connection_profile(resp) + return resp + + class _FetchStaticIps(DatastreamRestStub): + def __hash__(self): + return hash("FetchStaticIps") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.FetchStaticIpsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.FetchStaticIpsResponse: + r"""Call the fetch static ips method over HTTP. + + Args: + request (~.datastream.FetchStaticIpsRequest): + The request object. Request message for 'FetchStaticIps' + request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.FetchStaticIpsResponse: + Response message for a + 'FetchStaticIps' response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}:fetchStaticIps", + }, + ] + request, metadata = self._interceptor.pre_fetch_static_ips( + request, metadata + ) + pb_request = datastream.FetchStaticIpsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.FetchStaticIpsResponse() + pb_resp = datastream.FetchStaticIpsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_static_ips(resp) + return resp + + class _GetConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("GetConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.ConnectionProfile: + r"""Call the get connection profile method over HTTP. + + Args: + request (~.datastream.GetConnectionProfileRequest): + The request object. Request message for getting a + connection profile. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream_resources.ConnectionProfile: + A set of reusable connection + configurations to be used as a source or + destination for a stream. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/connectionProfiles/*}", + }, + ] + request, metadata = self._interceptor.pre_get_connection_profile( + request, metadata + ) + pb_request = datastream.GetConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.ConnectionProfile() + pb_resp = datastream_resources.ConnectionProfile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_connection_profile(resp) + return resp + + class _GetPrivateConnection(DatastreamRestStub): + def __hash__(self): + return hash("GetPrivateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetPrivateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.PrivateConnection: + r"""Call the get private connection method over HTTP. + + Args: + request (~.datastream.GetPrivateConnectionRequest): + The request object. Request to get a private connection + configuration. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream_resources.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + between Datastream and a customer's + network. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateConnections/*}", + }, + ] + request, metadata = self._interceptor.pre_get_private_connection( + request, metadata + ) + pb_request = datastream.GetPrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.PrivateConnection() + pb_resp = datastream_resources.PrivateConnection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_private_connection(resp) + return resp + + class _GetRoute(DatastreamRestStub): + def __hash__(self): + return hash("GetRoute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetRouteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.Route: + r"""Call the get route method over HTTP. + + Args: + request (~.datastream.GetRouteRequest): + The request object. Route get request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream_resources.Route: + The route resource is the child of + the private connection resource, used + for defining a route for a private + connection. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/privateConnections/*/routes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_route(request, metadata) + pb_request = datastream.GetRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.Route() + pb_resp = datastream_resources.Route.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_route(resp) + return resp + + class _GetStream(DatastreamRestStub): + def __hash__(self): + return hash("GetStream") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetStreamRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.Stream: + r"""Call the get stream method over HTTP. + + Args: + request (~.datastream.GetStreamRequest): + The request object. Request message for getting a stream. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream_resources.Stream: + A resource representing streaming + data from a source to a destination. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/streams/*}", + }, + ] + request, metadata = self._interceptor.pre_get_stream(request, metadata) + pb_request = datastream.GetStreamRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.Stream() + pb_resp = datastream_resources.Stream.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stream(resp) + return resp + + class _GetStreamObject(DatastreamRestStub): + def __hash__(self): + return hash("GetStreamObject") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetStreamObjectRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.StreamObject: + r"""Call the get stream object method over HTTP. + + Args: + request (~.datastream.GetStreamObjectRequest): + The request object. Request for fetching a specific + stream object. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream_resources.StreamObject: + A specific stream object (e.g a + specific DB table). 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/streams/*/objects/*}", + }, + ] + request, metadata = self._interceptor.pre_get_stream_object( + request, metadata + ) + pb_request = datastream.GetStreamObjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.StreamObject() + pb_resp = datastream_resources.StreamObject.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stream_object(resp) + return resp + + class _ListConnectionProfiles(DatastreamRestStub): + def __hash__(self): + return hash("ListConnectionProfiles") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListConnectionProfilesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListConnectionProfilesResponse: + r"""Call the list connection profiles method over HTTP. + + Args: + request (~.datastream.ListConnectionProfilesRequest): + The request object. Request message for listing + connection profiles. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.ListConnectionProfilesResponse: + Response message for listing + connection profiles. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/connectionProfiles", + }, + ] + request, metadata = self._interceptor.pre_list_connection_profiles( + request, metadata + ) + pb_request = datastream.ListConnectionProfilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListConnectionProfilesResponse() + pb_resp = datastream.ListConnectionProfilesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_connection_profiles(resp) + return resp + + class _ListPrivateConnections(DatastreamRestStub): + def __hash__(self): + return hash("ListPrivateConnections") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListPrivateConnectionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListPrivateConnectionsResponse: + r"""Call the list private connections method over HTTP. + + Args: + request (~.datastream.ListPrivateConnectionsRequest): + The request object. Request for listing private + connections. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.ListPrivateConnectionsResponse: + Response containing a list of private + connection configurations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/privateConnections", + }, + ] + request, metadata = self._interceptor.pre_list_private_connections( + request, metadata + ) + pb_request = datastream.ListPrivateConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListPrivateConnectionsResponse() + pb_resp = datastream.ListPrivateConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_private_connections(resp) + return resp + + class _ListRoutes(DatastreamRestStub): + def __hash__(self): + return hash("ListRoutes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListRoutesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListRoutesResponse: + r"""Call the list routes method over HTTP. + + Args: + request (~.datastream.ListRoutesRequest): + The request object. Route list request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.ListRoutesResponse: + Route list response. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/privateConnections/*}/routes", + }, + ] + request, metadata = self._interceptor.pre_list_routes(request, metadata) + pb_request = datastream.ListRoutesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListRoutesResponse() + pb_resp = datastream.ListRoutesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_routes(resp) + return resp + + class _ListStreamObjects(DatastreamRestStub): + def __hash__(self): + return hash("ListStreamObjects") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListStreamObjectsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListStreamObjectsResponse: + r"""Call the list stream objects method over HTTP. + + Args: + request (~.datastream.ListStreamObjectsRequest): + The request object. Request for listing all objects for a + specific stream. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.ListStreamObjectsResponse: + Response containing the objects for a + stream. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/streams/*}/objects", + }, + ] + request, metadata = self._interceptor.pre_list_stream_objects( + request, metadata + ) + pb_request = datastream.ListStreamObjectsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListStreamObjectsResponse() + pb_resp = datastream.ListStreamObjectsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_stream_objects(resp) + return resp + + class _ListStreams(DatastreamRestStub): + def __hash__(self): + return hash("ListStreams") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListStreamsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListStreamsResponse: + r"""Call the list streams method over HTTP. + + Args: + request (~.datastream.ListStreamsRequest): + The request object. Request message for listing streams. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.ListStreamsResponse: + Response message for listing streams. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/streams", + }, + ] + request, metadata = self._interceptor.pre_list_streams(request, metadata) + pb_request = datastream.ListStreamsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListStreamsResponse() + pb_resp = datastream.ListStreamsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_streams(resp) + return resp + + class _LookupStreamObject(DatastreamRestStub): + def __hash__(self): + return hash("LookupStreamObject") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.LookupStreamObjectRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.StreamObject: + r"""Call the lookup stream object method over HTTP. + + Args: + request (~.datastream.LookupStreamObjectRequest): + The request object. Request for looking up a specific + stream object by its source object + identifier. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream_resources.StreamObject: + A specific stream object (e.g a + specific DB table). 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/streams/*}/objects:lookup", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_lookup_stream_object( + request, metadata + ) + pb_request = datastream.LookupStreamObjectRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.StreamObject() + pb_resp = datastream_resources.StreamObject.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lookup_stream_object(resp) + return resp + + class _StartBackfillJob(DatastreamRestStub): + def __hash__(self): + return hash("StartBackfillJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.StartBackfillJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.StartBackfillJobResponse: + r"""Call the start backfill job method over HTTP. + + Args: + request (~.datastream.StartBackfillJobRequest): + The request object. Request for manually initiating a + backfill job for a specific stream + object. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.StartBackfillJobResponse: + Response for manually initiating a + backfill job for a specific stream + object. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{object_=projects/*/locations/*/streams/*/objects/*}:startBackfillJob", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_start_backfill_job( + request, metadata + ) + pb_request = datastream.StartBackfillJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.StartBackfillJobResponse() + pb_resp = datastream.StartBackfillJobResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_backfill_job(resp) + return resp + + class _StopBackfillJob(DatastreamRestStub): + def __hash__(self): + return hash("StopBackfillJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.StopBackfillJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.StopBackfillJobResponse: + r"""Call the stop backfill job method over HTTP. + + Args: + request (~.datastream.StopBackfillJobRequest): + The request object. Request for manually stopping a + running backfill job for a specific + stream object. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.StopBackfillJobResponse: + Response for manually stop a backfill + job for a specific stream object. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{object_=projects/*/locations/*/streams/*/objects/*}:stopBackfillJob", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stop_backfill_job( + request, metadata + ) + pb_request = datastream.StopBackfillJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.StopBackfillJobResponse() + pb_resp = datastream.StopBackfillJobResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_backfill_job(resp) + return resp + + class _UpdateConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("UpdateConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.UpdateConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update connection profile method over HTTP. + + Args: + request (~.datastream.UpdateConnectionProfileRequest): + The request object. Connection profile update message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{connection_profile.name=projects/*/locations/*/connectionProfiles/*}", + "body": "connection_profile", + }, + ] + request, metadata = self._interceptor.pre_update_connection_profile( + request, metadata + ) + pb_request = datastream.UpdateConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_connection_profile(resp) + return resp + + class _UpdateStream(DatastreamRestStub): + def __hash__(self): + return hash("UpdateStream") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.UpdateStreamRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update stream method over HTTP. + + Args: + request (~.datastream.UpdateStreamRequest): + The request object. Request message for updating a + stream. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{stream.name=projects/*/locations/*/streams/*}", + "body": "stream", + }, + ] + request, metadata = self._interceptor.pre_update_stream(request, metadata) + pb_request = datastream.UpdateStreamRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_stream(resp) + return resp + + @property + def create_connection_profile( + self, + ) -> Callable[ + [datastream.CreateConnectionProfileRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_private_connection( + self, + ) -> Callable[ + [datastream.CreatePrivateConnectionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreatePrivateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_route( + self, + ) -> Callable[[datastream.CreateRouteRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRoute(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_stream( + self, + ) -> Callable[[datastream.CreateStreamRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateStream(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_connection_profile( + self, + ) -> Callable[ + [datastream.DeleteConnectionProfileRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_private_connection( + self, + ) -> Callable[ + [datastream.DeletePrivateConnectionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeletePrivateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_route( + self, + ) -> Callable[[datastream.DeleteRouteRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRoute(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_stream( + self, + ) -> Callable[[datastream.DeleteStreamRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteStream(self._session, self._host, self._interceptor) # type: ignore + + @property + def discover_connection_profile( + self, + ) -> Callable[ + [datastream.DiscoverConnectionProfileRequest], + datastream.DiscoverConnectionProfileResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DiscoverConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_static_ips( + self, + ) -> Callable[ + [datastream.FetchStaticIpsRequest], datastream.FetchStaticIpsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchStaticIps(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection_profile( + self, + ) -> Callable[ + [datastream.GetConnectionProfileRequest], datastream_resources.ConnectionProfile + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_private_connection( + self, + ) -> Callable[ + [datastream.GetPrivateConnectionRequest], datastream_resources.PrivateConnection + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPrivateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_route( + self, + ) -> Callable[[datastream.GetRouteRequest], datastream_resources.Route]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRoute(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stream( + self, + ) -> Callable[[datastream.GetStreamRequest], datastream_resources.Stream]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStream(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stream_object( + self, + ) -> Callable[ + [datastream.GetStreamObjectRequest], datastream_resources.StreamObject + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStreamObject(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_connection_profiles( + self, + ) -> Callable[ + [datastream.ListConnectionProfilesRequest], + datastream.ListConnectionProfilesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListConnectionProfiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_private_connections( + self, + ) -> Callable[ + [datastream.ListPrivateConnectionsRequest], + datastream.ListPrivateConnectionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPrivateConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_routes( + self, + ) -> Callable[[datastream.ListRoutesRequest], datastream.ListRoutesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRoutes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_stream_objects( + self, + ) -> Callable[ + [datastream.ListStreamObjectsRequest], datastream.ListStreamObjectsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListStreamObjects(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_streams( + self, + ) -> Callable[[datastream.ListStreamsRequest], datastream.ListStreamsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListStreams(self._session, self._host, self._interceptor) # type: ignore + + @property + def lookup_stream_object( + self, + ) -> Callable[ + [datastream.LookupStreamObjectRequest], datastream_resources.StreamObject + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._LookupStreamObject(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_backfill_job( + self, + ) -> Callable[ + [datastream.StartBackfillJobRequest], datastream.StartBackfillJobResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartBackfillJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_backfill_job( + self, + ) -> Callable[ + [datastream.StopBackfillJobRequest], datastream.StopBackfillJobResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StopBackfillJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_connection_profile( + self, + ) -> Callable[ + [datastream.UpdateConnectionProfileRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_stream( + self, + ) -> Callable[[datastream.UpdateStreamRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateStream(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(DatastreamRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(DatastreamRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(DatastreamRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(DatastreamRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(DatastreamRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(DatastreamRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DatastreamRestTransport",) diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_metadata.json b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_metadata.json index a82501e99d35..fc483ba9bede 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_metadata.json +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_metadata.json @@ -226,6 +226,116 @@ ] } } + }, + "rest": { + "libraryClient": "DatastreamClient", + "rpcs": { + "CreateConnectionProfile": { + "methods": [ + "create_connection_profile" + ] + }, + "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, + "CreateRoute": { + "methods": [ + "create_route" + ] + }, + "CreateStream": { + "methods": [ + "create_stream" + ] + }, + "DeleteConnectionProfile": { + "methods": [ + "delete_connection_profile" + ] + }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DeleteRoute": { + "methods": [ + "delete_route" + ] + }, + "DeleteStream": { + "methods": [ + "delete_stream" + ] + }, + "DiscoverConnectionProfile": { + "methods": [ + "discover_connection_profile" + ] + }, + "FetchErrors": { + "methods": [ + "fetch_errors" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, + "GetConnectionProfile": { + "methods": [ + "get_connection_profile" + ] + }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "GetRoute": { + "methods": [ + "get_route" + ] + }, + "GetStream": { + "methods": [ + "get_stream" + ] + }, + 
"ListConnectionProfiles": { + "methods": [ + "list_connection_profiles" + ] + }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, + "ListRoutes": { + "methods": [ + "list_routes" + ] + }, + "ListStreams": { + "methods": [ + "list_streams" + ] + }, + "UpdateConnectionProfile": { + "methods": [ + "update_connection_profile" + ] + }, + "UpdateStream": { + "methods": [ + "update_stream" + ] + } + } } } } diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py index 69a689fa4393..2898315703fa 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py @@ -58,6 +58,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DatastreamTransport from .transports.grpc import DatastreamGrpcTransport from .transports.grpc_asyncio import DatastreamGrpcAsyncIOTransport +from .transports.rest import DatastreamRestTransport class DatastreamClientMeta(type): @@ -71,6 +72,7 @@ class DatastreamClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[DatastreamTransport]] _transport_registry["grpc"] = DatastreamGrpcTransport _transport_registry["grpc_asyncio"] = DatastreamGrpcAsyncIOTransport + _transport_registry["rest"] = DatastreamRestTransport def get_transport_class( cls, diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/__init__.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/__init__.py index 064e68286f30..00352319b938 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/__init__.py +++ 
b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/__init__.py @@ -19,14 +19,18 @@ from .base import DatastreamTransport from .grpc import DatastreamGrpcTransport from .grpc_asyncio import DatastreamGrpcAsyncIOTransport +from .rest import DatastreamRestInterceptor, DatastreamRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[DatastreamTransport]] _transport_registry["grpc"] = DatastreamGrpcTransport _transport_registry["grpc_asyncio"] = DatastreamGrpcAsyncIOTransport +_transport_registry["rest"] = DatastreamRestTransport __all__ = ( "DatastreamTransport", "DatastreamGrpcTransport", "DatastreamGrpcAsyncIOTransport", + "DatastreamRestTransport", + "DatastreamRestInterceptor", ) diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/rest.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/rest.py new file mode 100644 index 000000000000..8e255c695097 --- /dev/null +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/rest.py @@ -0,0 +1,2976 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.datastream_v1alpha1.types import datastream, datastream_resources + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DatastreamTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DatastreamRestInterceptor: + """Interceptor for Datastream. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DatastreamRestTransport. + + .. 
code-block:: python + class MyCustomDatastreamInterceptor(DatastreamRestInterceptor): + def pre_create_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_private_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_private_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_route(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_route(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_stream(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_stream(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_private_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_private_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_route(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_route(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_stream(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_stream(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_discover_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_discover_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_errors(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_errors(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_static_ips(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_static_ips(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_private_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_private_connection(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_route(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_route(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_stream(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stream(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_connection_profiles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_connection_profiles(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_list_private_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_private_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_routes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_routes(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_streams(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_streams(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_connection_profile(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_connection_profile(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_stream(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_stream(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DatastreamRestTransport(interceptor=MyCustomDatastreamInterceptor()) + client = DatastreamClient(transport=transport) + + + """ + + def pre_create_connection_profile( + self, + request: datastream.CreateConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.CreateConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_create_connection_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_create_private_connection( + self, + request: datastream.CreatePrivateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.CreatePrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_private_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_create_private_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_private_connection + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_create_route( + self, + request: datastream.CreateRouteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.CreateRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_route + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_create_route( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_route + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_create_stream( + self, + request: datastream.CreateStreamRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.CreateStreamRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_stream + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_create_stream( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_stream + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_delete_connection_profile( + self, + request: datastream.DeleteConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DeleteConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_delete_connection_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_delete_private_connection( + self, + request: datastream.DeletePrivateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DeletePrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_private_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_delete_private_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_private_connection + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_delete_route( + self, + request: datastream.DeleteRouteRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DeleteRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_route + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_delete_route( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_route + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_delete_stream( + self, + request: datastream.DeleteStreamRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DeleteStreamRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_stream + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_delete_stream( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_stream + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_discover_connection_profile( + self, + request: datastream.DiscoverConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.DiscoverConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for discover_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_discover_connection_profile( + self, response: datastream.DiscoverConnectionProfileResponse + ) -> datastream.DiscoverConnectionProfileResponse: + """Post-rpc interceptor for discover_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_fetch_errors( + self, + request: datastream.FetchErrorsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.FetchErrorsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_errors + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_fetch_errors( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for fetch_errors + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_fetch_static_ips( + self, + request: datastream.FetchStaticIpsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.FetchStaticIpsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_static_ips + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_fetch_static_ips( + self, response: datastream.FetchStaticIpsResponse + ) -> datastream.FetchStaticIpsResponse: + """Post-rpc interceptor for fetch_static_ips + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_connection_profile( + self, + request: datastream.GetConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.GetConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_connection_profile( + self, response: datastream_resources.ConnectionProfile + ) -> datastream_resources.ConnectionProfile: + """Post-rpc interceptor for get_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_private_connection( + self, + request: datastream.GetPrivateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.GetPrivateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_private_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_private_connection( + self, response: datastream_resources.PrivateConnection + ) -> datastream_resources.PrivateConnection: + """Post-rpc interceptor for get_private_connection + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_get_route( + self, request: datastream.GetRouteRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastream.GetRouteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_route + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_route( + self, response: datastream_resources.Route + ) -> datastream_resources.Route: + """Post-rpc interceptor for get_route + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_get_stream( + self, request: datastream.GetStreamRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastream.GetStreamRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stream + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_get_stream( + self, response: datastream_resources.Stream + ) -> datastream_resources.Stream: + """Post-rpc interceptor for get_stream + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_connection_profiles( + self, + request: datastream.ListConnectionProfilesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.ListConnectionProfilesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_connection_profiles + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_list_connection_profiles( + self, response: datastream.ListConnectionProfilesResponse + ) -> datastream.ListConnectionProfilesResponse: + """Post-rpc interceptor for list_connection_profiles + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_private_connections( + self, + request: datastream.ListPrivateConnectionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.ListPrivateConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_private_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_list_private_connections( + self, response: datastream.ListPrivateConnectionsResponse + ) -> datastream.ListPrivateConnectionsResponse: + """Post-rpc interceptor for list_private_connections + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_list_routes( + self, request: datastream.ListRoutesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[datastream.ListRoutesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_routes + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_list_routes( + self, response: datastream.ListRoutesResponse + ) -> datastream.ListRoutesResponse: + """Post-rpc interceptor for list_routes + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. 
+ """ + return response + + def pre_list_streams( + self, + request: datastream.ListStreamsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.ListStreamsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_streams + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_list_streams( + self, response: datastream.ListStreamsResponse + ) -> datastream.ListStreamsResponse: + """Post-rpc interceptor for list_streams + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_update_connection_profile( + self, + request: datastream.UpdateConnectionProfileRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.UpdateConnectionProfileRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_connection_profile + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. + """ + return request, metadata + + def post_update_connection_profile( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_connection_profile + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + def pre_update_stream( + self, + request: datastream.UpdateStreamRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datastream.UpdateStreamRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_stream + + Override in a subclass to manipulate the request or metadata + before they are sent to the Datastream server. 
+ """ + return request, metadata + + def post_update_stream( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_stream + + Override in a subclass to manipulate the response + after it is returned by the Datastream server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DatastreamRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DatastreamRestInterceptor + + +class DatastreamRestTransport(DatastreamTransport): + """REST backend transport for Datastream. + + Datastream service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "datastream.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DatastreamRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DatastreamRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = {} + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache.
+ return self._operations_client + + class _CreateConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("CreateConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "connectionProfileId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.CreateConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create connection profile method over HTTP. + + Args: + request (~.datastream.CreateConnectionProfileRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/connectionProfiles", + "body": "connection_profile", + }, + ] + request, metadata = self._interceptor.pre_create_connection_profile( + request, metadata + ) + pb_request = datastream.CreateConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_connection_profile(resp) + return resp + + class _CreatePrivateConnection(DatastreamRestStub): + def __hash__(self): + return hash("CreatePrivateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "privateConnectionId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.CreatePrivateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create private connection method over HTTP. + + Args: + request (~.datastream.CreatePrivateConnectionRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/privateConnections", + "body": "private_connection", + }, + ] + request, metadata = self._interceptor.pre_create_private_connection( + request, metadata + ) + pb_request = datastream.CreatePrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_private_connection(resp) + return resp + + class _CreateRoute(DatastreamRestStub): + def __hash__(self): + return hash("CreateRoute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "routeId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.CreateRouteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create route method over HTTP. + + Args: + request (~.datastream.CreateRouteRequest): + The request object. route creation request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*/privateConnections/*}/routes", + "body": "route", + }, + ] + request, metadata = self._interceptor.pre_create_route(request, metadata) + pb_request = datastream.CreateRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_route(resp) + return resp + + class _CreateStream(DatastreamRestStub): + def __hash__(self): + return hash("CreateStream") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "streamId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.CreateStreamRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create stream method over HTTP. + + Args: + request (~.datastream.CreateStreamRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/streams", + "body": "stream", + }, + ] + request, metadata = self._interceptor.pre_create_stream(request, metadata) + pb_request = datastream.CreateStreamRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_stream(resp) + return resp + + class _DeleteConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("DeleteConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DeleteConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete connection profile method over HTTP. + + Args: + request (~.datastream.DeleteConnectionProfileRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/connectionProfiles/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_connection_profile( + request, metadata + ) + pb_request = datastream.DeleteConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_connection_profile(resp) + return resp + + class _DeletePrivateConnection(DatastreamRestStub): + def __hash__(self): + return hash("DeletePrivateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DeletePrivateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete private connection method over HTTP. + + Args: + request (~.datastream.DeletePrivateConnectionRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/privateConnections/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_private_connection( + request, metadata + ) + pb_request = datastream.DeletePrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_private_connection(resp) + return resp + + class _DeleteRoute(DatastreamRestStub): + def __hash__(self): + return hash("DeleteRoute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DeleteRouteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete route method over HTTP. + + Args: + request (~.datastream.DeleteRouteRequest): + The request object. route deletion request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/privateConnections/*/routes/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_route(request, metadata) + pb_request = datastream.DeleteRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_route(resp) + return resp + + class _DeleteStream(DatastreamRestStub): + def __hash__(self): + return hash("DeleteStream") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DeleteStreamRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete stream method over HTTP. + + Args: + request (~.datastream.DeleteStreamRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha1/{name=projects/*/locations/*/streams/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_stream(request, metadata) + pb_request = datastream.DeleteStreamRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_stream(resp) + return resp + + class _DiscoverConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("DiscoverConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.DiscoverConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.DiscoverConnectionProfileResponse: + r"""Call the discover connection + profile method over HTTP. + + Args: + request (~.datastream.DiscoverConnectionProfileRequest): + The request object. Request message for 'discover' + ConnectionProfile request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.datastream.DiscoverConnectionProfileResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/connectionProfiles:discover", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_discover_connection_profile( + request, metadata + ) + pb_request = datastream.DiscoverConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.DiscoverConnectionProfileResponse() + pb_resp = datastream.DiscoverConnectionProfileResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_discover_connection_profile(resp) + return resp + + class _FetchErrors(DatastreamRestStub): + def __hash__(self): + return hash("FetchErrors") + + def __call__( + self, + request: datastream.FetchErrorsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the fetch errors method over HTTP. + + Args: + request (~.datastream.FetchErrorsRequest): + The request object. Request message for 'FetchErrors' + request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha1/{stream=projects/*/locations/*/streams/*}:fetchErrors", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_fetch_errors(request, metadata) + pb_request = datastream.FetchErrorsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_errors(resp) + return resp + + class _FetchStaticIps(DatastreamRestStub): + def __hash__(self): + return hash("FetchStaticIps") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.FetchStaticIpsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.FetchStaticIpsResponse: + r"""Call the fetch static ips method over HTTP. + + Args: + request (~.datastream.FetchStaticIpsRequest): + The request object. Request message for 'FetchStaticIps' + request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream.FetchStaticIpsResponse: + Response message for a + 'FetchStaticIps' response. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*}:fetchStaticIps", + }, + ] + request, metadata = self._interceptor.pre_fetch_static_ips( + request, metadata + ) + pb_request = datastream.FetchStaticIpsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.FetchStaticIpsResponse() + pb_resp = datastream.FetchStaticIpsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_static_ips(resp) + return resp + + class _GetConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("GetConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.ConnectionProfile: + r"""Call the get connection profile method over HTTP. + + Args: + request (~.datastream.GetConnectionProfileRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.datastream_resources.ConnectionProfile: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/connectionProfiles/*}", + }, + ] + request, metadata = self._interceptor.pre_get_connection_profile( + request, metadata + ) + pb_request = datastream.GetConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.ConnectionProfile() + pb_resp = datastream_resources.ConnectionProfile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_connection_profile(resp) + return resp + + class _GetPrivateConnection(DatastreamRestStub): + def __hash__(self): + return hash("GetPrivateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetPrivateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.PrivateConnection: + r"""Call the get private connection method over HTTP. + + Args: + request (~.datastream.GetPrivateConnectionRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream_resources.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + between Datastream and a customer's + network. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/privateConnections/*}", + }, + ] + request, metadata = self._interceptor.pre_get_private_connection( + request, metadata + ) + pb_request = datastream.GetPrivateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.PrivateConnection() + pb_resp = datastream_resources.PrivateConnection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_private_connection(resp) + return resp + + class _GetRoute(DatastreamRestStub): + def __hash__(self): + return hash("GetRoute") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetRouteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.Route: + r"""Call the get route method over HTTP. + + Args: + request (~.datastream.GetRouteRequest): + The request object. route get request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datastream_resources.Route: + The Route resource is the child of + the PrivateConnection resource. It used + to define a route for a + PrivateConnection setup. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/privateConnections/*/routes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_route(request, metadata) + pb_request = datastream.GetRouteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.Route() + pb_resp = datastream_resources.Route.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_route(resp) + return resp + + class _GetStream(DatastreamRestStub): + def __hash__(self): + return hash("GetStream") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.GetStreamRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream_resources.Stream: + r"""Call the get stream method over HTTP. + + Args: + request (~.datastream.GetStreamRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.datastream_resources.Stream: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{name=projects/*/locations/*/streams/*}", + }, + ] + request, metadata = self._interceptor.pre_get_stream(request, metadata) + pb_request = datastream.GetStreamRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream_resources.Stream() + pb_resp = datastream_resources.Stream.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stream(resp) + return resp + + class _ListConnectionProfiles(DatastreamRestStub): + def __hash__(self): + return hash("ListConnectionProfiles") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListConnectionProfilesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListConnectionProfilesResponse: + r"""Call the list connection profiles method over HTTP. + + Args: + request (~.datastream.ListConnectionProfilesRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.datastream.ListConnectionProfilesResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/connectionProfiles", + }, + ] + request, metadata = self._interceptor.pre_list_connection_profiles( + request, metadata + ) + pb_request = datastream.ListConnectionProfilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListConnectionProfilesResponse() + pb_resp = datastream.ListConnectionProfilesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_connection_profiles(resp) + return resp + + class _ListPrivateConnections(DatastreamRestStub): + def __hash__(self): + return hash("ListPrivateConnections") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListPrivateConnectionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListPrivateConnectionsResponse: + r"""Call the list private connections method over HTTP. + + Args: + request (~.datastream.ListPrivateConnectionsRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.datastream.ListPrivateConnectionsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/privateConnections", + }, + ] + request, metadata = self._interceptor.pre_list_private_connections( + request, metadata + ) + pb_request = datastream.ListPrivateConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListPrivateConnectionsResponse() + pb_resp = datastream.ListPrivateConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_private_connections(resp) + return resp + + class _ListRoutes(DatastreamRestStub): + def __hash__(self): + return hash("ListRoutes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListRoutesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListRoutesResponse: + r"""Call the list routes method over HTTP. + + Args: + request (~.datastream.ListRoutesRequest): + The request object. route list request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.datastream.ListRoutesResponse: + route list response + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*/privateConnections/*}/routes", + }, + ] + request, metadata = self._interceptor.pre_list_routes(request, metadata) + pb_request = datastream.ListRoutesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListRoutesResponse() + pb_resp = datastream.ListRoutesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_routes(resp) + return resp + + class _ListStreams(DatastreamRestStub): + def __hash__(self): + return hash("ListStreams") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.ListStreamsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datastream.ListStreamsResponse: + r"""Call the list streams method over HTTP. + + Args: + request (~.datastream.ListStreamsRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.datastream.ListStreamsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha1/{parent=projects/*/locations/*}/streams", + }, + ] + request, metadata = self._interceptor.pre_list_streams(request, metadata) + pb_request = datastream.ListStreamsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datastream.ListStreamsResponse() + pb_resp = datastream.ListStreamsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_streams(resp) + return resp + + class _UpdateConnectionProfile(DatastreamRestStub): + def __hash__(self): + return hash("UpdateConnectionProfile") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.UpdateConnectionProfileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update connection profile method over HTTP. + + Args: + request (~.datastream.UpdateConnectionProfileRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{connection_profile.name=projects/*/locations/*/connectionProfiles/*}", + "body": "connection_profile", + }, + ] + request, metadata = self._interceptor.pre_update_connection_profile( + request, metadata + ) + pb_request = datastream.UpdateConnectionProfileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_connection_profile(resp) + return resp + + class _UpdateStream(DatastreamRestStub): + def __hash__(self): + return hash("UpdateStream") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datastream.UpdateStreamRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update stream method over HTTP. + + Args: + request (~.datastream.UpdateStreamRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha1/{stream.name=projects/*/locations/*/streams/*}", + "body": "stream", + }, + ] + request, metadata = self._interceptor.pre_update_stream(request, metadata) + pb_request = datastream.UpdateStreamRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_stream(resp) + return resp + + @property + def create_connection_profile( + self, + ) -> Callable[ + [datastream.CreateConnectionProfileRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_private_connection( + self, + ) -> Callable[ + [datastream.CreatePrivateConnectionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreatePrivateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_route( + self, + ) -> Callable[[datastream.CreateRouteRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateRoute(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_stream( + self, + ) -> Callable[[datastream.CreateStreamRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateStream(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_connection_profile( + self, + ) -> Callable[ + [datastream.DeleteConnectionProfileRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_private_connection( + self, + ) -> Callable[ + [datastream.DeletePrivateConnectionRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeletePrivateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_route( + self, + ) -> Callable[[datastream.DeleteRouteRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteRoute(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_stream( + self, + ) -> Callable[[datastream.DeleteStreamRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteStream(self._session, self._host, self._interceptor) # type: ignore + + @property + def discover_connection_profile( + self, + ) -> Callable[ + [datastream.DiscoverConnectionProfileRequest], + datastream.DiscoverConnectionProfileResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DiscoverConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_errors( + self, + ) -> Callable[[datastream.FetchErrorsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchErrors(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_static_ips( + self, + ) -> Callable[ + [datastream.FetchStaticIpsRequest], datastream.FetchStaticIpsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._FetchStaticIps(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection_profile( + self, + ) -> Callable[ + [datastream.GetConnectionProfileRequest], datastream_resources.ConnectionProfile + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_private_connection( + self, + ) -> Callable[ + [datastream.GetPrivateConnectionRequest], datastream_resources.PrivateConnection + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPrivateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_route( + self, + ) -> Callable[[datastream.GetRouteRequest], datastream_resources.Route]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetRoute(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stream( + self, + ) -> Callable[[datastream.GetStreamRequest], datastream_resources.Stream]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStream(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_connection_profiles( + self, + ) -> Callable[ + [datastream.ListConnectionProfilesRequest], + datastream.ListConnectionProfilesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListConnectionProfiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_private_connections( + self, + ) -> Callable[ + [datastream.ListPrivateConnectionsRequest], + datastream.ListPrivateConnectionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPrivateConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_routes( + self, + ) -> Callable[[datastream.ListRoutesRequest], datastream.ListRoutesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListRoutes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_streams( + self, + ) -> Callable[[datastream.ListStreamsRequest], datastream.ListStreamsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListStreams(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_connection_profile( + self, + ) -> Callable[ + [datastream.UpdateConnectionProfileRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateConnectionProfile(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_stream( + self, + ) -> Callable[[datastream.UpdateStreamRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateStream(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DatastreamRestTransport",) diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py index b58ae80e7ed0..66b7935f30de 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import ( @@ -48,12 +50,15 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.datastream_v1.services.datastream import ( DatastreamAsyncClient, @@ -110,6 +115,7 @@ def test__get_default_mtls_endpoint(): [ (DatastreamClient, "grpc"), (DatastreamAsyncClient, "grpc_asyncio"), + (DatastreamClient, "rest"), ], ) def test_datastream_client_from_service_account_info(client_class, transport_name): @@ -123,7 +129,11 @@ def test_datastream_client_from_service_account_info(client_class, transport_nam assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("datastream.googleapis.com:443") + assert 
client.transport._host == ( + "datastream.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastream.googleapis.com" + ) @pytest.mark.parametrize( @@ -131,6 +141,7 @@ def test_datastream_client_from_service_account_info(client_class, transport_nam [ (transports.DatastreamGrpcTransport, "grpc"), (transports.DatastreamGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DatastreamRestTransport, "rest"), ], ) def test_datastream_client_service_account_always_use_jwt( @@ -156,6 +167,7 @@ def test_datastream_client_service_account_always_use_jwt( [ (DatastreamClient, "grpc"), (DatastreamAsyncClient, "grpc_asyncio"), + (DatastreamClient, "rest"), ], ) def test_datastream_client_from_service_account_file(client_class, transport_name): @@ -176,13 +188,18 @@ def test_datastream_client_from_service_account_file(client_class, transport_nam assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("datastream.googleapis.com:443") + assert client.transport._host == ( + "datastream.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastream.googleapis.com" + ) def test_datastream_client_get_transport_class(): transport = DatastreamClient.get_transport_class() available_transports = [ transports.DatastreamGrpcTransport, + transports.DatastreamRestTransport, ] assert transport in available_transports @@ -199,6 +216,7 @@ def test_datastream_client_get_transport_class(): transports.DatastreamGrpcAsyncIOTransport, "grpc_asyncio", ), + (DatastreamClient, transports.DatastreamRestTransport, "rest"), ], ) @mock.patch.object( @@ -342,6 +360,8 @@ def test_datastream_client_client_options( "grpc_asyncio", "false", ), + (DatastreamClient, transports.DatastreamRestTransport, "rest", "true"), + (DatastreamClient, transports.DatastreamRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -535,6 +555,7 @@ def 
test_datastream_client_get_mtls_endpoint_and_cert_source(client_class): transports.DatastreamGrpcAsyncIOTransport, "grpc_asyncio", ), + (DatastreamClient, transports.DatastreamRestTransport, "rest"), ], ) def test_datastream_client_client_options_scopes( @@ -570,6 +591,7 @@ def test_datastream_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (DatastreamClient, transports.DatastreamRestTransport, "rest", None), ], ) def test_datastream_client_client_options_credentials_file( @@ -7814,131 +7836,8247 @@ async def test_delete_route_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatastreamGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListConnectionProfilesRequest, + dict, + ], +) +def test_list_connection_profiles_rest(request_type): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DatastreamClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListConnectionProfilesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.DatastreamGrpcTransport( + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListConnectionProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_connection_profiles(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionProfilesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_connection_profiles_rest_required_fields( + request_type=datastream.ListConnectionProfilesRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_connection_profiles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_connection_profiles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListConnectionProfilesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListConnectionProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_connection_profiles(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_connection_profiles_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - with pytest.raises(ValueError): - client = DatastreamClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + + unset_fields = transport.list_connection_profiles._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) + & set(("parent",)) + ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.DatastreamGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_connection_profiles_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastreamClient( - client_options=options, - transport=transport, + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_connection_profiles" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_connection_profiles" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListConnectionProfilesRequest.pb( + datastream.ListConnectionProfilesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListConnectionProfilesResponse.to_json( + datastream.ListConnectionProfilesResponse() ) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastreamClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + request = datastream.ListConnectionProfilesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListConnectionProfilesResponse() + + client.list_connection_profiles( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # It is an error to provide scopes and a transport instance. - transport = transports.DatastreamGrpcTransport( + pre.assert_called_once() + post.assert_called_once() + + +def test_list_connection_profiles_rest_bad_request( + transport: str = "rest", request_type=datastream.ListConnectionProfilesRequest +): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = DatastreamClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastreamGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_connection_profiles(request) + + +def test_list_connection_profiles_rest_flattened(): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = DatastreamClient(transport=transport) - assert client.transport is transport + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListConnectionProfilesResponse() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastreamGrpcTransport( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListConnectionProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_connection_profiles(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/connectionProfiles" + % client.transport._host, + args[1], + ) + + +def test_list_connection_profiles_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel - transport = transports.DatastreamGrpcAsyncIOTransport( + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connection_profiles( + datastream.ListConnectionProfilesRequest(), + parent="parent_value", + ) + + +def test_list_connection_profiles_rest_pager(transport: str = "rest"): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListConnectionProfilesResponse( + connection_profiles=[ + datastream_resources.ConnectionProfile(), + datastream_resources.ConnectionProfile(), + datastream_resources.ConnectionProfile(), + ], + next_page_token="abc", + ), + datastream.ListConnectionProfilesResponse( + connection_profiles=[], + next_page_token="def", + ), + datastream.ListConnectionProfilesResponse( + connection_profiles=[ + datastream_resources.ConnectionProfile(), + ], + next_page_token="ghi", + ), + datastream.ListConnectionProfilesResponse( + connection_profiles=[ + datastream_resources.ConnectionProfile(), + datastream_resources.ConnectionProfile(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datastream.ListConnectionProfilesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastreamGrpcTransport, - transports.DatastreamGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_connection_profiles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, datastream_resources.ConnectionProfile) for i in results + ) + + pages = list(client.list_connection_profiles(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + datastream.GetConnectionProfileRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = DatastreamClient.get_transport_class(transport_name)( +def test_get_connection_profile_rest(request_type): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.ConnectionProfile( + name="name_value", + display_name="display_name_value", + oracle_profile=datastream_resources.OracleProfile( + hostname="hostname_value" + ), + static_service_ip_connectivity=None, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.ConnectionProfile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_connection_profile(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastream_resources.ConnectionProfile) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_connection_profile_rest_required_fields( + request_type=datastream.GetConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in 
jsonified_request + assert jsonified_request["name"] == "name_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.ConnectionProfile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.ConnectionProfile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_connection_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - assert isinstance( - client.transport, - transports.DatastreamGrpcTransport, + + unset_fields = transport.get_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + 
+ +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_connection_profile" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetConnectionProfileRequest.pb( + datastream.GetConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.ConnectionProfile.to_json( + datastream_resources.ConnectionProfile() + ) + request = datastream.GetConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.ConnectionProfile() -def test_datastream_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DatastreamTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + client.get_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_datastream_base_transport(): 
+ +def test_get_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.GetConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_connection_profile(request) + + +def test_get_connection_profile_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.ConnectionProfile() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.ConnectionProfile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_connection_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/connectionProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_get_connection_profile_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_connection_profile( + datastream.GetConnectionProfileRequest(), + name="name_value", + ) + + +def test_get_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.CreateConnectionProfileRequest, + dict, + ], +) +def test_create_connection_profile_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["connection_profile"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "oracle_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database_service": "database_service_value", + "connection_attributes": {}, + }, + "gcs_profile": {"bucket": "bucket_value", "root_path": "root_path_value"}, + "mysql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "ssl_config": { + "client_key": "client_key_value", + "client_key_set": True, + "client_certificate": "client_certificate_value", + "client_certificate_set": True, + "ca_certificate": "ca_certificate_value", + "ca_certificate_set": True, + }, + }, + "bigquery_profile": {}, + "postgresql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database": "database_value", + }, + "static_service_ip_connectivity": {}, + "forward_ssh_connectivity": { + "hostname": "hostname_value", + "username": "username_value", + "port": 453, + "password": "password_value", + "private_key": "private_key_value", + }, + 
"private_connectivity": {"private_connection": "private_connection_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_connection_profile(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_connection_profile_rest_required_fields( + request_type=datastream.CreateConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["connection_profile_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "connectionProfileId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "connectionProfileId" in jsonified_request + assert ( + jsonified_request["connectionProfileId"] + == request_init["connection_profile_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["connectionProfileId"] = 
"connection_profile_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_connection_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "connection_profile_id", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "connectionProfileId" in jsonified_request + assert jsonified_request["connectionProfileId"] == "connection_profile_id_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_connection_profile(request) + + expected_params = [ + ( + "connectionProfileId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "connectionProfileId", + "force", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "connectionProfileId", + "connectionProfile", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_create_connection_profile" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.CreateConnectionProfileRequest.pb( + datastream.CreateConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.CreateConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.CreateConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["connection_profile"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "oracle_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database_service": "database_service_value", + "connection_attributes": {}, + }, + "gcs_profile": {"bucket": "bucket_value", "root_path": "root_path_value"}, + "mysql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "ssl_config": { + "client_key": "client_key_value", + "client_key_set": True, + "client_certificate": "client_certificate_value", + 
"client_certificate_set": True, + "ca_certificate": "ca_certificate_value", + "ca_certificate_set": True, + }, + }, + "bigquery_profile": {}, + "postgresql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database": "database_value", + }, + "static_service_ip_connectivity": {}, + "forward_ssh_connectivity": { + "hostname": "hostname_value", + "username": "username_value", + "port": 453, + "password": "password_value", + "private_key": "private_key_value", + }, + "private_connectivity": {"private_connection": "private_connection_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_connection_profile(request) + + +def test_create_connection_profile_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + connection_profile=datastream_resources.ConnectionProfile( + name="name_value" + ), + connection_profile_id="connection_profile_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_connection_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/connectionProfiles" + % client.transport._host, + args[1], + ) + + +def test_create_connection_profile_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_connection_profile( + datastream.CreateConnectionProfileRequest(), + parent="parent_value", + connection_profile=datastream_resources.ConnectionProfile( + name="name_value" + ), + connection_profile_id="connection_profile_id_value", + ) + + +def test_create_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.UpdateConnectionProfileRequest, + dict, + ], +) +def test_update_connection_profile_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "connection_profile": { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + } + request_init["connection_profile"] = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "oracle_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database_service": "database_service_value", + "connection_attributes": {}, + }, + "gcs_profile": {"bucket": "bucket_value", "root_path": "root_path_value"}, + "mysql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "ssl_config": { + "client_key": "client_key_value", + "client_key_set": True, + "client_certificate": "client_certificate_value", + "client_certificate_set": True, + "ca_certificate": "ca_certificate_value", + "ca_certificate_set": True, + }, + }, + "bigquery_profile": {}, + "postgresql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database": 
"database_value", + }, + "static_service_ip_connectivity": {}, + "forward_ssh_connectivity": { + "hostname": "hostname_value", + "username": "username_value", + "port": 453, + "password": "password_value", + "private_key": "private_key_value", + }, + "private_connectivity": {"private_connection": "private_connection_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_connection_profile(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_connection_profile_rest_required_fields( + request_type=datastream.UpdateConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_connection_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_connection_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("connectionProfile",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_update_connection_profile" + ) as post, mock.patch.object( + 
transports.DatastreamRestInterceptor, "pre_update_connection_profile" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.UpdateConnectionProfileRequest.pb( + datastream.UpdateConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.UpdateConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.UpdateConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "connection_profile": { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + } + request_init["connection_profile"] = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "oracle_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database_service": "database_service_value", + "connection_attributes": {}, + }, + "gcs_profile": {"bucket": "bucket_value", "root_path": "root_path_value"}, + "mysql_profile": { + "hostname": "hostname_value", + "port": 453, + 
"username": "username_value", + "password": "password_value", + "ssl_config": { + "client_key": "client_key_value", + "client_key_set": True, + "client_certificate": "client_certificate_value", + "client_certificate_set": True, + "ca_certificate": "ca_certificate_value", + "ca_certificate_set": True, + }, + }, + "bigquery_profile": {}, + "postgresql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database": "database_value", + }, + "static_service_ip_connectivity": {}, + "forward_ssh_connectivity": { + "hostname": "hostname_value", + "username": "username_value", + "port": 453, + "password": "password_value", + "private_key": "private_key_value", + }, + "private_connectivity": {"private_connection": "private_connection_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_connection_profile(request) + + +def test_update_connection_profile_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "connection_profile": { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + connection_profile=datastream_resources.ConnectionProfile( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_connection_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{connection_profile.name=projects/*/locations/*/connectionProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_update_connection_profile_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_connection_profile( + datastream.UpdateConnectionProfileRequest(), + connection_profile=datastream_resources.ConnectionProfile( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.DeleteConnectionProfileRequest, + dict, + ], +) +def test_delete_connection_profile_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_connection_profile(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_connection_profile_rest_required_fields( + request_type=datastream.DeleteConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_connection_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_connection_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_delete_connection_profile" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = datastream.DeleteConnectionProfileRequest.pb( + datastream.DeleteConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.DeleteConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.DeleteConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_connection_profile(request) + + +def test_delete_connection_profile_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_connection_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/connectionProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_connection_profile_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_connection_profile( + datastream.DeleteConnectionProfileRequest(), + name="name_value", + ) + + +def test_delete_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.DiscoverConnectionProfileRequest, + dict, + ], +) +def test_discover_connection_profile_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.DiscoverConnectionProfileResponse( + oracle_rdbms=datastream_resources.OracleRdbms( + oracle_schemas=[ + datastream_resources.OracleSchema(schema="schema_value") + ] + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.DiscoverConnectionProfileResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.discover_connection_profile(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastream.DiscoverConnectionProfileResponse) + + +def test_discover_connection_profile_rest_required_fields( + request_type=datastream.DiscoverConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).discover_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).discover_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.DiscoverConnectionProfileResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.DiscoverConnectionProfileResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.discover_connection_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_discover_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.discover_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_discover_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_discover_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_discover_connection_profile" + 
) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.DiscoverConnectionProfileRequest.pb( + datastream.DiscoverConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + datastream.DiscoverConnectionProfileResponse.to_json( + datastream.DiscoverConnectionProfileResponse() + ) + ) + + request = datastream.DiscoverConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.DiscoverConnectionProfileResponse() + + client.discover_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_discover_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.DiscoverConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.discover_connection_profile(request) + + +def test_discover_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListStreamsRequest, + dict, + ], +) +def test_list_streams_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_streams(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListStreamsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_streams_rest_required_fields(request_type=datastream.ListStreamsRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_streams._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_streams._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_streams(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_streams_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_streams._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_streams_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.DatastreamRestInterceptor, "post_list_streams" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_streams" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListStreamsRequest.pb(datastream.ListStreamsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListStreamsResponse.to_json( + datastream.ListStreamsResponse() + ) + + request = datastream.ListStreamsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListStreamsResponse() + + client.list_streams( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_streams_rest_bad_request( + transport: str = "rest", request_type=datastream.ListStreamsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_streams(request) + + +def test_list_streams_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_streams(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/streams" % client.transport._host, + args[1], + ) + + +def test_list_streams_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_streams( + datastream.ListStreamsRequest(), + parent="parent_value", + ) + + +def test_list_streams_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListStreamsResponse( + streams=[ + datastream_resources.Stream(), + datastream_resources.Stream(), + datastream_resources.Stream(), + ], + next_page_token="abc", + ), + datastream.ListStreamsResponse( + streams=[], + next_page_token="def", + ), + datastream.ListStreamsResponse( + streams=[ + datastream_resources.Stream(), + ], + next_page_token="ghi", + ), + datastream.ListStreamsResponse( + streams=[ + datastream_resources.Stream(), + datastream_resources.Stream(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(datastream.ListStreamsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_streams(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datastream_resources.Stream) for i in results) + + pages = list(client.list_streams(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize( + "request_type", + [ + datastream.GetStreamRequest, + dict, + ], +) +def test_get_stream_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Stream( + name="name_value", + display_name="display_name_value", + state=datastream_resources.Stream.State.NOT_STARTED, + customer_managed_encryption_key="customer_managed_encryption_key_value", + backfill_all=datastream_resources.Stream.BackfillAllStrategy( + oracle_excluded_objects=datastream_resources.OracleRdbms( + oracle_schemas=[ + datastream_resources.OracleSchema(schema="schema_value") + ] + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.Stream.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_stream(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastream_resources.Stream) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == datastream_resources.Stream.State.NOT_STARTED + assert ( + response.customer_managed_encryption_key + == "customer_managed_encryption_key_value" + ) + + +def test_get_stream_rest_required_fields(request_type=datastream.GetStreamRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Stream() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.Stream.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_stream(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_stream_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_stream._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stream_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_stream" + ) as post, 
mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_stream" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetStreamRequest.pb(datastream.GetStreamRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.Stream.to_json( + datastream_resources.Stream() + ) + + request = datastream.GetStreamRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.Stream() + + client.get_stream( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stream_rest_bad_request( + transport: str = "rest", request_type=datastream.GetStreamRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stream(request) + + +def test_get_stream_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Stream() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/streams/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.Stream.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_stream(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/streams/*}" % client.transport._host, + args[1], + ) + + +def test_get_stream_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_stream( + datastream.GetStreamRequest(), + name="name_value", + ) + + +def test_get_stream_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.CreateStreamRequest, + dict, + ], +) +def test_create_stream_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["stream"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "source_config": { + "source_connection_profile": "source_connection_profile_value", + "oracle_source_config": { + "include_objects": { + "oracle_schemas": [ + { + "schema": "schema_value", + "oracle_tables": [ + { + "table": "table_value", + "oracle_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "encoding": "encoding_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "max_concurrent_cdc_tasks": 2550, + "drop_large_objects": {}, + "stream_large_objects": {}, + }, + "mysql_source_config": { + "include_objects": { + "mysql_databases": [ + { + "database": "database_value", + "mysql_tables": [ + { + "table": "table_value", + "mysql_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "collation": "collation_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "max_concurrent_cdc_tasks": 2550, + }, + "postgresql_source_config": { + "include_objects": { + "postgresql_schemas": [ + { + 
"schema": "schema_value", + "postgresql_tables": [ + { + "table": "table_value", + "postgresql_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "replication_slot": "replication_slot_value", + "publication": "publication_value", + }, + }, + "destination_config": { + "destination_connection_profile": "destination_connection_profile_value", + "gcs_destination_config": { + "path": "path_value", + "file_rotation_mb": 1693, + "file_rotation_interval": {"seconds": 751, "nanos": 543}, + "avro_file_format": {}, + "json_file_format": {"schema_file_format": 1, "compression": 1}, + }, + "bigquery_destination_config": { + "single_target_dataset": {"dataset_id": "dataset_id_value"}, + "source_hierarchy_datasets": { + "dataset_template": { + "location": "location_value", + "dataset_id_prefix": "dataset_id_prefix_value", + "kms_key_name": "kms_key_name_value", + } + }, + "data_freshness": {}, + }, + }, + "state": 1, + "backfill_all": { + "oracle_excluded_objects": {}, + "mysql_excluded_objects": {}, + "postgresql_excluded_objects": {}, + }, + "backfill_none": {}, + "errors": [ + { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + } + ], + "customer_managed_encryption_key": "customer_managed_encryption_key_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_stream(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_stream_rest_required_fields( + request_type=datastream.CreateStreamRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["stream_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "streamId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "streamId" in jsonified_request + assert jsonified_request["streamId"] == request_init["stream_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["streamId"] = "stream_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_stream._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "force", + "request_id", + "stream_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "streamId" in jsonified_request + assert jsonified_request["streamId"] == "stream_id_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_stream(request) + + expected_params = [ + ( + "streamId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_stream_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_stream._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "streamId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "streamId", + "stream", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_stream_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_stream" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_create_stream" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
datastream.CreateStreamRequest.pb(datastream.CreateStreamRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.CreateStreamRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_stream( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_stream_rest_bad_request( + transport: str = "rest", request_type=datastream.CreateStreamRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["stream"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "source_config": { + "source_connection_profile": "source_connection_profile_value", + "oracle_source_config": { + "include_objects": { + "oracle_schemas": [ + { + "schema": "schema_value", + "oracle_tables": [ + { + "table": "table_value", + "oracle_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "encoding": "encoding_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "max_concurrent_cdc_tasks": 2550, + "drop_large_objects": {}, + "stream_large_objects": {}, + }, + "mysql_source_config": { + "include_objects": { + "mysql_databases": 
[ + { + "database": "database_value", + "mysql_tables": [ + { + "table": "table_value", + "mysql_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "collation": "collation_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "max_concurrent_cdc_tasks": 2550, + }, + "postgresql_source_config": { + "include_objects": { + "postgresql_schemas": [ + { + "schema": "schema_value", + "postgresql_tables": [ + { + "table": "table_value", + "postgresql_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "replication_slot": "replication_slot_value", + "publication": "publication_value", + }, + }, + "destination_config": { + "destination_connection_profile": "destination_connection_profile_value", + "gcs_destination_config": { + "path": "path_value", + "file_rotation_mb": 1693, + "file_rotation_interval": {"seconds": 751, "nanos": 543}, + "avro_file_format": {}, + "json_file_format": {"schema_file_format": 1, "compression": 1}, + }, + "bigquery_destination_config": { + "single_target_dataset": {"dataset_id": "dataset_id_value"}, + "source_hierarchy_datasets": { + "dataset_template": { + "location": "location_value", + "dataset_id_prefix": "dataset_id_prefix_value", + "kms_key_name": "kms_key_name_value", + } + }, + "data_freshness": {}, + }, + }, + "state": 1, + "backfill_all": { + "oracle_excluded_objects": {}, + "mysql_excluded_objects": {}, + "postgresql_excluded_objects": {}, + }, + "backfill_none": {}, + "errors": [ + { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + } + ], + "customer_managed_encryption_key": "customer_managed_encryption_key_value", + } + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_stream(request) + + +def test_create_stream_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + stream=datastream_resources.Stream(name="name_value"), + stream_id="stream_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_stream(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/streams" % client.transport._host, + args[1], + ) + + +def test_create_stream_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stream( + datastream.CreateStreamRequest(), + parent="parent_value", + stream=datastream_resources.Stream(name="name_value"), + stream_id="stream_id_value", + ) + + +def test_create_stream_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.UpdateStreamRequest, + dict, + ], +) +def test_update_stream_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "stream": {"name": "projects/sample1/locations/sample2/streams/sample3"} + } + request_init["stream"] = { + "name": "projects/sample1/locations/sample2/streams/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "source_config": { + "source_connection_profile": "source_connection_profile_value", + "oracle_source_config": { + "include_objects": { + "oracle_schemas": [ + { + "schema": "schema_value", + "oracle_tables": [ + { + "table": "table_value", + "oracle_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "encoding": "encoding_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + 
"max_concurrent_cdc_tasks": 2550, + "drop_large_objects": {}, + "stream_large_objects": {}, + }, + "mysql_source_config": { + "include_objects": { + "mysql_databases": [ + { + "database": "database_value", + "mysql_tables": [ + { + "table": "table_value", + "mysql_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "collation": "collation_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "max_concurrent_cdc_tasks": 2550, + }, + "postgresql_source_config": { + "include_objects": { + "postgresql_schemas": [ + { + "schema": "schema_value", + "postgresql_tables": [ + { + "table": "table_value", + "postgresql_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "replication_slot": "replication_slot_value", + "publication": "publication_value", + }, + }, + "destination_config": { + "destination_connection_profile": "destination_connection_profile_value", + "gcs_destination_config": { + "path": "path_value", + "file_rotation_mb": 1693, + "file_rotation_interval": {"seconds": 751, "nanos": 543}, + "avro_file_format": {}, + "json_file_format": {"schema_file_format": 1, "compression": 1}, + }, + "bigquery_destination_config": { + "single_target_dataset": {"dataset_id": "dataset_id_value"}, + "source_hierarchy_datasets": { + "dataset_template": { + "location": "location_value", + "dataset_id_prefix": "dataset_id_prefix_value", + "kms_key_name": "kms_key_name_value", + } + }, + "data_freshness": {}, + }, + }, + "state": 1, + "backfill_all": { + "oracle_excluded_objects": {}, + "mysql_excluded_objects": {}, + "postgresql_excluded_objects": {}, + }, + "backfill_none": {}, + "errors": [ + { + "reason": "reason_value", + "error_uuid": 
"error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + } + ], + "customer_managed_encryption_key": "customer_managed_encryption_key_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_stream(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_stream_rest_required_fields( + request_type=datastream.UpdateStreamRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_stream._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "force", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_stream(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_stream_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_stream._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & 
set(("stream",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_stream_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_update_stream" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_update_stream" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.UpdateStreamRequest.pb(datastream.UpdateStreamRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.UpdateStreamRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_stream( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_stream_rest_bad_request( + transport: str = "rest", request_type=datastream.UpdateStreamRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "stream": {"name": 
"projects/sample1/locations/sample2/streams/sample3"} + } + request_init["stream"] = { + "name": "projects/sample1/locations/sample2/streams/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "source_config": { + "source_connection_profile": "source_connection_profile_value", + "oracle_source_config": { + "include_objects": { + "oracle_schemas": [ + { + "schema": "schema_value", + "oracle_tables": [ + { + "table": "table_value", + "oracle_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "encoding": "encoding_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "max_concurrent_cdc_tasks": 2550, + "drop_large_objects": {}, + "stream_large_objects": {}, + }, + "mysql_source_config": { + "include_objects": { + "mysql_databases": [ + { + "database": "database_value", + "mysql_tables": [ + { + "table": "table_value", + "mysql_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "collation": "collation_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "max_concurrent_cdc_tasks": 2550, + }, + "postgresql_source_config": { + "include_objects": { + "postgresql_schemas": [ + { + "schema": "schema_value", + "postgresql_tables": [ + { + "table": "table_value", + "postgresql_columns": [ + { + "column": "column_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "exclude_objects": {}, + "replication_slot": "replication_slot_value", + "publication": "publication_value", + }, + }, + "destination_config": { + "destination_connection_profile": 
"destination_connection_profile_value", + "gcs_destination_config": { + "path": "path_value", + "file_rotation_mb": 1693, + "file_rotation_interval": {"seconds": 751, "nanos": 543}, + "avro_file_format": {}, + "json_file_format": {"schema_file_format": 1, "compression": 1}, + }, + "bigquery_destination_config": { + "single_target_dataset": {"dataset_id": "dataset_id_value"}, + "source_hierarchy_datasets": { + "dataset_template": { + "location": "location_value", + "dataset_id_prefix": "dataset_id_prefix_value", + "kms_key_name": "kms_key_name_value", + } + }, + "data_freshness": {}, + }, + }, + "state": 1, + "backfill_all": { + "oracle_excluded_objects": {}, + "mysql_excluded_objects": {}, + "postgresql_excluded_objects": {}, + }, + "backfill_none": {}, + "errors": [ + { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + } + ], + "customer_managed_encryption_key": "customer_managed_encryption_key_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_stream(request) + + +def test_update_stream_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "stream": {"name": "projects/sample1/locations/sample2/streams/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + stream=datastream_resources.Stream(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_stream(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{stream.name=projects/*/locations/*/streams/*}" + % client.transport._host, + args[1], + ) + + +def test_update_stream_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_stream( + datastream.UpdateStreamRequest(), + stream=datastream_resources.Stream(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_stream_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.DeleteStreamRequest, + dict, + ], +) +def test_delete_stream_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_stream(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_stream_rest_required_fields( + request_type=datastream.DeleteStreamRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_stream._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_stream(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_stream_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_stream._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_stream_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_stream" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_delete_stream" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
datastream.DeleteStreamRequest.pb(datastream.DeleteStreamRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.DeleteStreamRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_stream( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_stream_rest_bad_request( + transport: str = "rest", request_type=datastream.DeleteStreamRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_stream(request) + + +def test_delete_stream_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/streams/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_stream(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/streams/*}" % client.transport._host, + args[1], + ) + + +def test_delete_stream_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_stream( + datastream.DeleteStreamRequest(), + name="name_value", + ) + + +def test_delete_stream_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.GetStreamObjectRequest, + dict, + ], +) +def test_get_stream_object_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.StreamObject( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.StreamObject.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_stream_object(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastream_resources.StreamObject) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_stream_object_rest_required_fields( + request_type=datastream.GetStreamObjectRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stream_object._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stream_object._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.StreamObject() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.StreamObject.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_stream_object(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_stream_object_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_stream_object._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stream_object_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_stream_object" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_stream_object" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetStreamObjectRequest.pb( + 
datastream.GetStreamObjectRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.StreamObject.to_json( + datastream_resources.StreamObject() + ) + + request = datastream.GetStreamObjectRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.StreamObject() + + client.get_stream_object( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stream_object_rest_bad_request( + transport: str = "rest", request_type=datastream.GetStreamObjectRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stream_object(request) + + +def test_get_stream_object_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.StreamObject() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.StreamObject.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_stream_object(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/streams/*/objects/*}" + % client.transport._host, + args[1], + ) + + +def test_get_stream_object_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_stream_object( + datastream.GetStreamObjectRequest(), + name="name_value", + ) + + +def test_get_stream_object_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.LookupStreamObjectRequest, + dict, + ], +) +def test_lookup_stream_object_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.StreamObject( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.StreamObject.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_stream_object(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastream_resources.StreamObject) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_lookup_stream_object_rest_required_fields( + request_type=datastream.LookupStreamObjectRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_stream_object._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_stream_object._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.StreamObject() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.StreamObject.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.lookup_stream_object(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lookup_stream_object_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.lookup_stream_object._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "sourceObjectIdentifier", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_stream_object_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_lookup_stream_object" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_lookup_stream_object" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.LookupStreamObjectRequest.pb( + datastream.LookupStreamObjectRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.StreamObject.to_json( + datastream_resources.StreamObject() + ) + + request = datastream.LookupStreamObjectRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.StreamObject() + + client.lookup_stream_object( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_lookup_stream_object_rest_bad_request( + transport: str = "rest", request_type=datastream.LookupStreamObjectRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_stream_object(request) + + +def test_lookup_stream_object_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListStreamObjectsRequest, + dict, + ], +) +def test_list_stream_objects_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamObjectsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListStreamObjectsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_stream_objects(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListStreamObjectsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_stream_objects_rest_required_fields( + request_type=datastream.ListStreamObjectsRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_stream_objects._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_stream_objects._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamObjectsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListStreamObjectsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_stream_objects(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_stream_objects_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_stream_objects._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_stream_objects_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_stream_objects" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_stream_objects" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListStreamObjectsRequest.pb( + datastream.ListStreamObjectsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListStreamObjectsResponse.to_json( + datastream.ListStreamObjectsResponse() + ) + + request = datastream.ListStreamObjectsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListStreamObjectsResponse() + + client.list_stream_objects( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_stream_objects_rest_bad_request( + transport: str = "rest", request_type=datastream.ListStreamObjectsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_stream_objects(request) + + +def test_list_stream_objects_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamObjectsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/streams/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListStreamObjectsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_stream_objects(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/streams/*}/objects" + % client.transport._host, + args[1], + ) + + +def test_list_stream_objects_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_stream_objects( + datastream.ListStreamObjectsRequest(), + parent="parent_value", + ) + + +def test_list_stream_objects_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListStreamObjectsResponse( + stream_objects=[ + datastream_resources.StreamObject(), + datastream_resources.StreamObject(), + datastream_resources.StreamObject(), + ], + next_page_token="abc", + ), + datastream.ListStreamObjectsResponse( + stream_objects=[], + next_page_token="def", + ), + datastream.ListStreamObjectsResponse( + stream_objects=[ + datastream_resources.StreamObject(), + ], + next_page_token="ghi", + ), + datastream.ListStreamObjectsResponse( + stream_objects=[ + datastream_resources.StreamObject(), + datastream_resources.StreamObject(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datastream.ListStreamObjectsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/streams/sample3" + } + + pager = client.list_stream_objects(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datastream_resources.StreamObject) for i in results) + + pages = list(client.list_stream_objects(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.StartBackfillJobRequest, + dict, + ], +) +def test_start_backfill_job_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "object_": 
"projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.StartBackfillJobResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.StartBackfillJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.start_backfill_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastream.StartBackfillJobResponse) + + +def test_start_backfill_job_rest_required_fields( + request_type=datastream.StartBackfillJobRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["object_"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_backfill_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["object"] = "object__value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_backfill_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + 
assert "object" in jsonified_request + assert jsonified_request["object"] == "object__value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.StartBackfillJobResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.StartBackfillJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.start_backfill_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_start_backfill_job_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.start_backfill_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("object",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_start_backfill_job_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_start_backfill_job" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_start_backfill_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.StartBackfillJobRequest.pb( + datastream.StartBackfillJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.StartBackfillJobResponse.to_json( + datastream.StartBackfillJobResponse() + ) + + request = datastream.StartBackfillJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.StartBackfillJobResponse() + + client.start_backfill_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_backfill_job_rest_bad_request( + transport: str = "rest", request_type=datastream.StartBackfillJobRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "object_": "projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_backfill_job(request) + + +def test_start_backfill_job_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.StartBackfillJobResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "object_": "projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + object_="object__value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.StartBackfillJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.start_backfill_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{object_=projects/*/locations/*/streams/*/objects/*}:startBackfillJob" + % client.transport._host, + args[1], + ) + + +def test_start_backfill_job_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.start_backfill_job( + datastream.StartBackfillJobRequest(), + object_="object__value", + ) + + +def test_start_backfill_job_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.StopBackfillJobRequest, + dict, + ], +) +def test_stop_backfill_job_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "object_": "projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.StopBackfillJobResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.StopBackfillJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.stop_backfill_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastream.StopBackfillJobResponse) + + +def test_stop_backfill_job_rest_required_fields( + request_type=datastream.StopBackfillJobRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["object_"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_backfill_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["object"] = "object__value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_backfill_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "object" in jsonified_request + assert jsonified_request["object"] == "object__value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.StopBackfillJobResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.StopBackfillJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.stop_backfill_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_stop_backfill_job_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stop_backfill_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("object",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_backfill_job_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_stop_backfill_job" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_stop_backfill_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.StopBackfillJobRequest.pb( + datastream.StopBackfillJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.StopBackfillJobResponse.to_json( + datastream.StopBackfillJobResponse() + ) + + request = datastream.StopBackfillJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.StopBackfillJobResponse() + + client.stop_backfill_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_backfill_job_rest_bad_request( + transport: str = "rest", request_type=datastream.StopBackfillJobRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "object_": "projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_backfill_job(request) + + +def test_stop_backfill_job_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream.StopBackfillJobResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "object_": "projects/sample1/locations/sample2/streams/sample3/objects/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + object_="object__value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.StopBackfillJobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.stop_backfill_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{object_=projects/*/locations/*/streams/*/objects/*}:stopBackfillJob" + % client.transport._host, + args[1], + ) + + +def test_stop_backfill_job_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.stop_backfill_job( + datastream.StopBackfillJobRequest(), + object_="object__value", + ) + + +def test_stop_backfill_job_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.FetchStaticIpsRequest, + dict, + ], +) +def test_fetch_static_ips_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.FetchStaticIpsResponse( + static_ips=["static_ips_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.FetchStaticIpsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_static_ips(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchStaticIpsPager) + assert response.static_ips == ["static_ips_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_static_ips_rest_required_fields( + request_type=datastream.FetchStaticIpsRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_static_ips._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_static_ips._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.FetchStaticIpsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.FetchStaticIpsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_static_ips(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_static_ips_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_static_ips._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_static_ips_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.DatastreamRestInterceptor, "post_fetch_static_ips" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_fetch_static_ips" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.FetchStaticIpsRequest.pb( + datastream.FetchStaticIpsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.FetchStaticIpsResponse.to_json( + datastream.FetchStaticIpsResponse() + ) + + request = datastream.FetchStaticIpsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.FetchStaticIpsResponse() + + client.fetch_static_ips( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_static_ips_rest_bad_request( + transport: str = "rest", request_type=datastream.FetchStaticIpsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_static_ips(request) + + +def test_fetch_static_ips_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.FetchStaticIpsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.FetchStaticIpsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_static_ips(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*}:fetchStaticIps" + % client.transport._host, + args[1], + ) + + +def test_fetch_static_ips_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_static_ips( + datastream.FetchStaticIpsRequest(), + name="name_value", + ) + + +def test_fetch_static_ips_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + datastream.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + datastream.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + datastream.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(datastream.FetchStaticIpsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"name": "projects/sample1/locations/sample2"} + + pager = client.fetch_static_ips(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + pages = list(client.fetch_static_ips(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.CreatePrivateConnectionRequest, + dict, + ], +) +def 
test_create_private_connection_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["private_connection"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "state": 1, + "error": { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + }, + "vpc_peering_config": {"vpc": "vpc_value", "subnet": "subnet_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_private_connection(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_private_connection_rest_required_fields( + request_type=datastream.CreatePrivateConnectionRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["private_connection_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "privateConnectionId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_private_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "privateConnectionId" in jsonified_request + assert ( + jsonified_request["privateConnectionId"] + == request_init["private_connection_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["privateConnectionId"] = "private_connection_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_private_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "private_connection_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "privateConnectionId" in jsonified_request + assert jsonified_request["privateConnectionId"] == "private_connection_id_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_private_connection(request) + + expected_params = [ + ( + "privateConnectionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_private_connection_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_private_connection._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "privateConnectionId", + "requestId", + ) + ) + & set( + ( + "parent", + "privateConnectionId", + "privateConnection", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_private_connection_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_private_connection" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_create_private_connection" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = datastream.CreatePrivateConnectionRequest.pb( + datastream.CreatePrivateConnectionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.CreatePrivateConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_private_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_private_connection_rest_bad_request( + transport: str = "rest", request_type=datastream.CreatePrivateConnectionRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["private_connection"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "state": 1, + "error": { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + }, + "vpc_peering_config": {"vpc": "vpc_value", "subnet": "subnet_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_private_connection(request) + + +def test_create_private_connection_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + private_connection=datastream_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_private_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/privateConnections" + % client.transport._host, + args[1], + ) + + +def test_create_private_connection_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_private_connection( + datastream.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=datastream_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", + ) + + +def test_create_private_connection_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.GetPrivateConnectionRequest, + dict, + ], +) +def test_get_private_connection_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.PrivateConnection( + name="name_value", + display_name="display_name_value", + state=datastream_resources.PrivateConnection.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.PrivateConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_private_connection(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastream_resources.PrivateConnection) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == datastream_resources.PrivateConnection.State.CREATING + + +def test_get_private_connection_rest_required_fields( + request_type=datastream.GetPrivateConnectionRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_private_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_private_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert 
"name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.PrivateConnection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.PrivateConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_private_connection(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_private_connection_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_private_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_private_connection_rest_interceptors(null_interceptor): + transport = 
transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_private_connection" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_private_connection" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetPrivateConnectionRequest.pb( + datastream.GetPrivateConnectionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.PrivateConnection.to_json( + datastream_resources.PrivateConnection() + ) + + request = datastream.GetPrivateConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.PrivateConnection() + + client.get_private_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_private_connection_rest_bad_request( + transport: str = "rest", request_type=datastream.GetPrivateConnectionRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_private_connection(request) + + +def test_get_private_connection_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.PrivateConnection() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.PrivateConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_private_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateConnections/*}" + % client.transport._host, + args[1], + ) + + +def test_get_private_connection_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_private_connection( + datastream.GetPrivateConnectionRequest(), + name="name_value", + ) + + +def test_get_private_connection_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListPrivateConnectionsRequest, + dict, + ], +) +def test_list_private_connections_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream.ListPrivateConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListPrivateConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_private_connections(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_private_connections_rest_required_fields( + request_type=datastream.ListPrivateConnectionsRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_private_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_private_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListPrivateConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListPrivateConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_private_connections(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_private_connections_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_private_connections._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_private_connections_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_private_connections" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_private_connections" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListPrivateConnectionsRequest.pb( + datastream.ListPrivateConnectionsRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListPrivateConnectionsResponse.to_json( + datastream.ListPrivateConnectionsResponse() + ) + + request = datastream.ListPrivateConnectionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListPrivateConnectionsResponse() + + client.list_private_connections( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_private_connections_rest_bad_request( + transport: str = "rest", request_type=datastream.ListPrivateConnectionsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_private_connections(request) + + +def test_list_private_connections_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream.ListPrivateConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListPrivateConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_private_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/privateConnections" + % client.transport._host, + args[1], + ) + + +def test_list_private_connections_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_private_connections( + datastream.ListPrivateConnectionsRequest(), + parent="parent_value", + ) + + +def test_list_private_connections_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListPrivateConnectionsResponse( + private_connections=[ + datastream_resources.PrivateConnection(), + datastream_resources.PrivateConnection(), + datastream_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + datastream.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + datastream.ListPrivateConnectionsResponse( + private_connections=[ + datastream_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + datastream.ListPrivateConnectionsResponse( + private_connections=[ + datastream_resources.PrivateConnection(), + datastream_resources.PrivateConnection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datastream.ListPrivateConnectionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_private_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, datastream_resources.PrivateConnection) for i in results + ) + + pages = list(client.list_private_connections(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.DeletePrivateConnectionRequest, + dict, + ], +) +def test_delete_private_connection_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request 
that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_private_connection(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_private_connection_rest_required_fields( + request_type=datastream.DeletePrivateConnectionRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_private_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_private_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_private_connection(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_private_connection_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_private_connection._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_private_connection_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_private_connection" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_delete_private_connection" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.DeletePrivateConnectionRequest.pb( + datastream.DeletePrivateConnectionRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.DeletePrivateConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_private_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_private_connection_rest_bad_request( + transport: str = "rest", request_type=datastream.DeletePrivateConnectionRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_private_connection(request) + + +def test_delete_private_connection_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_private_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateConnections/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_private_connection_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_private_connection( + datastream.DeletePrivateConnectionRequest(), + name="name_value", + ) + + +def test_delete_private_connection_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.CreateRouteRequest, + dict, + ], +) +def test_create_route_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request_init["route"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "destination_address": "destination_address_value", + "destination_port": 1734, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_route(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_route_rest_required_fields(request_type=datastream.CreateRouteRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["route_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "routeId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_route._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "routeId" in jsonified_request + assert jsonified_request["routeId"] == request_init["route_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["routeId"] = "route_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_route._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "route_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "routeId" in jsonified_request + assert jsonified_request["routeId"] == "route_id_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_route(request) + + expected_params = [ + ( + "routeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_route_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_route._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "routeId", + ) + ) + & set( + ( + "parent", + "routeId", + "route", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_route_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_route" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_create_route" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.CreateRouteRequest.pb(datastream.CreateRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.CreateRouteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_route( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_route_rest_bad_request( + transport: str = "rest", request_type=datastream.CreateRouteRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request_init["route"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "destination_address": "destination_address_value", + "destination_port": 1734, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_route(request) + + +def test_create_route_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + route=datastream_resources.Route(name="name_value"), + route_id="route_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_route(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateConnections/*}/routes" + % client.transport._host, + args[1], + ) + + +def test_create_route_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_route( + datastream.CreateRouteRequest(), + parent="parent_value", + route=datastream_resources.Route(name="name_value"), + route_id="route_id_value", + ) + + +def test_create_route_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.GetRouteRequest, + dict, + ], +) +def test_get_route_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.Route( + name="name_value", + display_name="display_name_value", + destination_address="destination_address_value", + destination_port=1734, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_route(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastream_resources.Route) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.destination_address == "destination_address_value" + assert response.destination_port == 1734 + + +def test_get_route_rest_required_fields(request_type=datastream.GetRouteRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_route._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_route._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] 
== "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Route() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_route(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_route_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_route._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_route_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_route" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_route" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetRouteRequest.pb(datastream.GetRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.Route.to_json( + datastream_resources.Route() + ) + + request = datastream.GetRouteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.Route() + + client.get_route( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_route_rest_bad_request( + transport: str = "rest", request_type=datastream.GetRouteRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_route(request) + + +def test_get_route_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Route() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_route(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateConnections/*/routes/*}" + % client.transport._host, + args[1], + ) + + +def test_get_route_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_route( + datastream.GetRouteRequest(), + name="name_value", + ) + + +def test_get_route_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListRoutesRequest, + dict, + ], +) +def test_list_routes_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListRoutesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListRoutesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_routes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRoutesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_routes_rest_required_fields(request_type=datastream.ListRoutesRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_routes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_routes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListRoutesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListRoutesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_routes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_routes_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_routes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_routes_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.DatastreamRestInterceptor, "post_list_routes" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_routes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListRoutesRequest.pb(datastream.ListRoutesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListRoutesResponse.to_json( + datastream.ListRoutesResponse() + ) + + request = datastream.ListRoutesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListRoutesResponse() + + client.list_routes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_routes_rest_bad_request( + transport: str = "rest", request_type=datastream.ListRoutesRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_routes(request) + + +def test_list_routes_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListRoutesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListRoutesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_routes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/privateConnections/*}/routes" + % client.transport._host, + args[1], + ) + + +def test_list_routes_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_routes( + datastream.ListRoutesRequest(), + parent="parent_value", + ) + + +def test_list_routes_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListRoutesResponse( + routes=[ + datastream_resources.Route(), + datastream_resources.Route(), + datastream_resources.Route(), + ], + next_page_token="abc", + ), + datastream.ListRoutesResponse( + routes=[], + next_page_token="def", + ), + datastream.ListRoutesResponse( + routes=[ + datastream_resources.Route(), + ], + next_page_token="ghi", + ), + datastream.ListRoutesResponse( + routes=[ + datastream_resources.Route(), + datastream_resources.Route(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(datastream.ListRoutesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + pager = client.list_routes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datastream_resources.Route) for i in results) + + pages = list(client.list_routes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize( + "request_type", + [ + datastream.DeleteRouteRequest, + dict, + ], +) +def test_delete_route_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_route(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_route_rest_required_fields(request_type=datastream.DeleteRouteRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_route._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_route._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_route(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_route_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_route._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_route_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_route" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_delete_route" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
datastream.DeleteRouteRequest.pb(datastream.DeleteRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.DeleteRouteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_route( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_route_rest_bad_request( + transport: str = "rest", request_type=datastream.DeleteRouteRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_route(request) + + +def test_delete_route_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_route(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/privateConnections/*/routes/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_route_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_route( + datastream.DeleteRouteRequest(), + name="name_value", + ) + + +def test_delete_route_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastreamClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastreamClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastreamClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastreamClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DatastreamClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatastreamGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastreamGrpcTransport, + transports.DatastreamGrpcAsyncIOTransport, + transports.DatastreamRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DatastreamClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastreamGrpcTransport, + ) + + +def test_datastream_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DatastreamTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_datastream_base_transport(): # Instantiate the base transport. 
with mock.patch( "google.cloud.datastream_v1.services.datastream.transports.DatastreamTransport.__init__" @@ -8073,6 +16211,7 @@ def test_datastream_transport_auth_adc(transport_class): [ transports.DatastreamGrpcTransport, transports.DatastreamGrpcAsyncIOTransport, + transports.DatastreamRestTransport, ], ) def test_datastream_transport_auth_gdch_credentials(transport_class): @@ -8167,11 +16306,40 @@ def test_datastream_grpc_transport_client_cert_source_for_mtls(transport_class): ) +def test_datastream_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DatastreamRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_datastream_rest_lro_client(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_datastream_host_no_port(transport_name): @@ -8182,7 +16350,11 @@ def test_datastream_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("datastream.googleapis.com:443") + assert client.transport._host == ( + "datastream.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastream.googleapis.com" + ) @pytest.mark.parametrize( @@ -8190,6 +16362,7 @@ def test_datastream_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_datastream_host_with_port(transport_name): @@ -8200,7 +16373,105 @@ def test_datastream_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("datastream.googleapis.com:8000") + assert client.transport._host == ( + "datastream.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastream.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_datastream_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DatastreamClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DatastreamClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_connection_profiles._session + session2 = client2.transport.list_connection_profiles._session + assert session1 != session2 + session1 = client1.transport.get_connection_profile._session + session2 = client2.transport.get_connection_profile._session + assert session1 != session2 + session1 = client1.transport.create_connection_profile._session + session2 = client2.transport.create_connection_profile._session + assert session1 != session2 + session1 = 
client1.transport.update_connection_profile._session + session2 = client2.transport.update_connection_profile._session + assert session1 != session2 + session1 = client1.transport.delete_connection_profile._session + session2 = client2.transport.delete_connection_profile._session + assert session1 != session2 + session1 = client1.transport.discover_connection_profile._session + session2 = client2.transport.discover_connection_profile._session + assert session1 != session2 + session1 = client1.transport.list_streams._session + session2 = client2.transport.list_streams._session + assert session1 != session2 + session1 = client1.transport.get_stream._session + session2 = client2.transport.get_stream._session + assert session1 != session2 + session1 = client1.transport.create_stream._session + session2 = client2.transport.create_stream._session + assert session1 != session2 + session1 = client1.transport.update_stream._session + session2 = client2.transport.update_stream._session + assert session1 != session2 + session1 = client1.transport.delete_stream._session + session2 = client2.transport.delete_stream._session + assert session1 != session2 + session1 = client1.transport.get_stream_object._session + session2 = client2.transport.get_stream_object._session + assert session1 != session2 + session1 = client1.transport.lookup_stream_object._session + session2 = client2.transport.lookup_stream_object._session + assert session1 != session2 + session1 = client1.transport.list_stream_objects._session + session2 = client2.transport.list_stream_objects._session + assert session1 != session2 + session1 = client1.transport.start_backfill_job._session + session2 = client2.transport.start_backfill_job._session + assert session1 != session2 + session1 = client1.transport.stop_backfill_job._session + session2 = client2.transport.stop_backfill_job._session + assert session1 != session2 + session1 = client1.transport.fetch_static_ips._session + session2 = 
client2.transport.fetch_static_ips._session + assert session1 != session2 + session1 = client1.transport.create_private_connection._session + session2 = client2.transport.create_private_connection._session + assert session1 != session2 + session1 = client1.transport.get_private_connection._session + session2 = client2.transport.get_private_connection._session + assert session1 != session2 + session1 = client1.transport.list_private_connections._session + session2 = client2.transport.list_private_connections._session + assert session1 != session2 + session1 = client1.transport.delete_private_connection._session + session2 = client2.transport.delete_private_connection._session + assert session1 != session2 + session1 = client1.transport.create_route._session + session2 = client2.transport.create_route._session + assert session1 != session2 + session1 = client1.transport.get_route._session + session2 = client2.transport.get_route._session + assert session1 != session2 + session1 = client1.transport.list_routes._session + session2 = client2.transport.list_routes._session + assert session1 != session2 + session1 = client1.transport.delete_route._session + session2 = client2.transport.delete_route._session + assert session1 != session2 def test_datastream_grpc_transport_channel(): @@ -8658,6 +16929,352 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + def test_delete_operation(transport: str = "grpc"): client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9514,6 +18131,7 @@ async def test_get_location_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -9531,6 +18149,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py index c0b1fab764ef..1d48fc5575be 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import ( @@ -44,12 +46,15 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.datastream_v1alpha1.services.datastream import ( DatastreamAsyncClient, @@ -106,6 +111,7 @@ def test__get_default_mtls_endpoint(): [ (DatastreamClient, "grpc"), (DatastreamAsyncClient, "grpc_asyncio"), + (DatastreamClient, "rest"), ], ) def test_datastream_client_from_service_account_info(client_class, transport_name): @@ -119,7 +125,11 @@ def 
test_datastream_client_from_service_account_info(client_class, transport_nam assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("datastream.googleapis.com:443") + assert client.transport._host == ( + "datastream.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastream.googleapis.com" + ) @pytest.mark.parametrize( @@ -127,6 +137,7 @@ def test_datastream_client_from_service_account_info(client_class, transport_nam [ (transports.DatastreamGrpcTransport, "grpc"), (transports.DatastreamGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DatastreamRestTransport, "rest"), ], ) def test_datastream_client_service_account_always_use_jwt( @@ -152,6 +163,7 @@ def test_datastream_client_service_account_always_use_jwt( [ (DatastreamClient, "grpc"), (DatastreamAsyncClient, "grpc_asyncio"), + (DatastreamClient, "rest"), ], ) def test_datastream_client_from_service_account_file(client_class, transport_name): @@ -172,13 +184,18 @@ def test_datastream_client_from_service_account_file(client_class, transport_nam assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("datastream.googleapis.com:443") + assert client.transport._host == ( + "datastream.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastream.googleapis.com" + ) def test_datastream_client_get_transport_class(): transport = DatastreamClient.get_transport_class() available_transports = [ transports.DatastreamGrpcTransport, + transports.DatastreamRestTransport, ] assert transport in available_transports @@ -195,6 +212,7 @@ def test_datastream_client_get_transport_class(): transports.DatastreamGrpcAsyncIOTransport, "grpc_asyncio", ), + (DatastreamClient, transports.DatastreamRestTransport, "rest"), ], ) @mock.patch.object( @@ -338,6 +356,8 @@ def test_datastream_client_client_options( "grpc_asyncio", "false", ), + 
(DatastreamClient, transports.DatastreamRestTransport, "rest", "true"), + (DatastreamClient, transports.DatastreamRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -531,6 +551,7 @@ def test_datastream_client_get_mtls_endpoint_and_cert_source(client_class): transports.DatastreamGrpcAsyncIOTransport, "grpc_asyncio", ), + (DatastreamClient, transports.DatastreamRestTransport, "rest"), ], ) def test_datastream_client_client_options_scopes( @@ -566,6 +587,7 @@ def test_datastream_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (DatastreamClient, transports.DatastreamRestTransport, "rest", None), ], ) def test_datastream_client_client_options_credentials_file( @@ -6610,143 +6632,6787 @@ async def test_delete_route_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatastreamGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListConnectionProfilesRequest, + dict, + ], +) +def test_list_connection_profiles_rest(request_type): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DatastreamClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListConnectionProfilesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.DatastreamGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListConnectionProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_connection_profiles(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionProfilesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_connection_profiles_rest_required_fields( + request_type=datastream.ListConnectionProfilesRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - with pytest.raises(ValueError): - client = DatastreamClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_connection_profiles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_connection_profiles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListConnectionProfilesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListConnectionProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_connection_profiles(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_connection_profiles_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_connection_profiles._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) + & set(("parent",)) + ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.DatastreamGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_connection_profiles_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastreamClient( - client_options=options, - transport=transport, + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_connection_profiles" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_connection_profiles" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListConnectionProfilesRequest.pb( + datastream.ListConnectionProfilesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListConnectionProfilesResponse.to_json( + datastream.ListConnectionProfilesResponse() ) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatastreamClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + request = datastream.ListConnectionProfilesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListConnectionProfilesResponse() + + client.list_connection_profiles( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # It is an error to provide scopes and a transport instance. - transport = transports.DatastreamGrpcTransport( + pre.assert_called_once() + post.assert_called_once() + + +def test_list_connection_profiles_rest_bad_request( + transport: str = "rest", request_type=datastream.ListConnectionProfilesRequest +): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = DatastreamClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastreamGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_connection_profiles(request) + + +def test_list_connection_profiles_rest_flattened(): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = DatastreamClient(transport=transport) - assert client.transport is transport + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListConnectionProfilesResponse() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatastreamGrpcTransport( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListConnectionProfilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_connection_profiles(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/connectionProfiles" + % client.transport._host, + args[1], + ) + + +def test_list_connection_profiles_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel - transport = transports.DatastreamGrpcAsyncIOTransport( + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connection_profiles( + datastream.ListConnectionProfilesRequest(), + parent="parent_value", + ) + + +def test_list_connection_profiles_rest_pager(transport: str = "rest"): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListConnectionProfilesResponse( + connection_profiles=[ + datastream_resources.ConnectionProfile(), + datastream_resources.ConnectionProfile(), + datastream_resources.ConnectionProfile(), + ], + next_page_token="abc", + ), + datastream.ListConnectionProfilesResponse( + connection_profiles=[], + next_page_token="def", + ), + datastream.ListConnectionProfilesResponse( + connection_profiles=[ + datastream_resources.ConnectionProfile(), + ], + next_page_token="ghi", + ), + datastream.ListConnectionProfilesResponse( + connection_profiles=[ + datastream_resources.ConnectionProfile(), + datastream_resources.ConnectionProfile(), + ], + ), + ) + # Two responses for two calls + response = response + response -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatastreamGrpcTransport, - transports.DatastreamGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Wrap the values into proper Response objs + response = tuple( + datastream.ListConnectionProfilesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_connection_profiles(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, datastream_resources.ConnectionProfile) for i in results + ) + + pages = list(client.list_connection_profiles(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + datastream.GetConnectionProfileRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = DatastreamClient.get_transport_class(transport_name)( +def test_get_connection_profile_rest(request_type): + client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.ConnectionProfile( + name="name_value", + display_name="display_name_value", + oracle_profile=datastream_resources.OracleProfile( + hostname="hostname_value" + ), + no_connectivity=None, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.ConnectionProfile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_connection_profile(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastream_resources.ConnectionProfile) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_connection_profile_rest_required_fields( + request_type=datastream.GetConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + 
assert jsonified_request["name"] == "name_value" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = DatastreamClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.DatastreamGrpcTransport, + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.ConnectionProfile() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.ConnectionProfile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_connection_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.get_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def 
test_datastream_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DatastreamTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_connection_profile" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetConnectionProfileRequest.pb( + datastream.GetConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.ConnectionProfile.to_json( + datastream_resources.ConnectionProfile() ) + request = datastream.GetConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.ConnectionProfile() -def test_datastream_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.datastream_v1alpha1.services.datastream.transports.DatastreamTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DatastreamTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.get_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( + pre.assert_called_once() + post.assert_called_once() + + +def test_get_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.GetConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_connection_profile(request) + + +def test_get_connection_profile_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.ConnectionProfile() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.ConnectionProfile.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_connection_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/connectionProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_get_connection_profile_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_connection_profile( + datastream.GetConnectionProfileRequest(), + name="name_value", + ) + + +def test_get_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.CreateConnectionProfileRequest, + dict, + ], +) +def test_create_connection_profile_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["connection_profile"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "oracle_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database_service": "database_service_value", + "connection_attributes": {}, + }, + "gcs_profile": { + "bucket_name": "bucket_name_value", + "root_path": "root_path_value", + }, + "mysql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "ssl_config": { + "client_key": "client_key_value", + "client_key_set": True, + "client_certificate": "client_certificate_value", + "client_certificate_set": True, + "ca_certificate": "ca_certificate_value", + "ca_certificate_set": True, + }, + }, + "no_connectivity": {}, + "static_service_ip_connectivity": {}, + "forward_ssh_connectivity": { + "hostname": "hostname_value", + "username": "username_value", + "port": 453, + "password": "password_value", + "private_key": "private_key_value", + }, + "private_connectivity": { + "private_connection_name": "private_connection_name_value" + }, + } + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_connection_profile(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_connection_profile_rest_required_fields( + request_type=datastream.CreateConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["connection_profile_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "connectionProfileId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "connectionProfileId" in jsonified_request + assert ( + jsonified_request["connectionProfileId"] + == request_init["connection_profile_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["connectionProfileId"] = "connection_profile_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_connection_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "connection_profile_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "connectionProfileId" in jsonified_request + assert jsonified_request["connectionProfileId"] == "connection_profile_id_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_connection_profile(request) + + expected_params = [ + ( + "connectionProfileId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "connectionProfileId", + "requestId", + ) + ) + & set( + ( + "parent", + "connectionProfileId", + "connectionProfile", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_create_connection_profile" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = datastream.CreateConnectionProfileRequest.pb( + datastream.CreateConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.CreateConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.CreateConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["connection_profile"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "oracle_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database_service": "database_service_value", + "connection_attributes": {}, + }, + "gcs_profile": { + "bucket_name": "bucket_name_value", + "root_path": "root_path_value", + }, + "mysql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "ssl_config": { + "client_key": "client_key_value", + "client_key_set": True, + "client_certificate": "client_certificate_value", + 
"client_certificate_set": True, + "ca_certificate": "ca_certificate_value", + "ca_certificate_set": True, + }, + }, + "no_connectivity": {}, + "static_service_ip_connectivity": {}, + "forward_ssh_connectivity": { + "hostname": "hostname_value", + "username": "username_value", + "port": 453, + "password": "password_value", + "private_key": "private_key_value", + }, + "private_connectivity": { + "private_connection_name": "private_connection_name_value" + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_connection_profile(request) + + +def test_create_connection_profile_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + connection_profile=datastream_resources.ConnectionProfile( + name="name_value" + ), + connection_profile_id="connection_profile_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_connection_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/connectionProfiles" + % client.transport._host, + args[1], + ) + + +def test_create_connection_profile_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_connection_profile( + datastream.CreateConnectionProfileRequest(), + parent="parent_value", + connection_profile=datastream_resources.ConnectionProfile( + name="name_value" + ), + connection_profile_id="connection_profile_id_value", + ) + + +def test_create_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.UpdateConnectionProfileRequest, + dict, + ], +) +def test_update_connection_profile_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "connection_profile": { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + } + request_init["connection_profile"] = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "oracle_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database_service": "database_service_value", + "connection_attributes": {}, + }, + "gcs_profile": { + "bucket_name": "bucket_name_value", + "root_path": "root_path_value", + }, + "mysql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "ssl_config": { + "client_key": "client_key_value", + "client_key_set": True, + "client_certificate": "client_certificate_value", + "client_certificate_set": True, + "ca_certificate": "ca_certificate_value", + "ca_certificate_set": True, + }, + }, + "no_connectivity": {}, + "static_service_ip_connectivity": {}, + "forward_ssh_connectivity": { + "hostname": "hostname_value", + "username": "username_value", + 
"port": 453, + "password": "password_value", + "private_key": "private_key_value", + }, + "private_connectivity": { + "private_connection_name": "private_connection_name_value" + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_connection_profile(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_connection_profile_rest_required_fields( + request_type=datastream.UpdateConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_connection_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_connection_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("connectionProfile",)) + ) + 
+ +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_update_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_update_connection_profile" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.UpdateConnectionProfileRequest.pb( + datastream.UpdateConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.UpdateConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.UpdateConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + 
request_init = { + "connection_profile": { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + } + request_init["connection_profile"] = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "oracle_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "database_service": "database_service_value", + "connection_attributes": {}, + }, + "gcs_profile": { + "bucket_name": "bucket_name_value", + "root_path": "root_path_value", + }, + "mysql_profile": { + "hostname": "hostname_value", + "port": 453, + "username": "username_value", + "password": "password_value", + "ssl_config": { + "client_key": "client_key_value", + "client_key_set": True, + "client_certificate": "client_certificate_value", + "client_certificate_set": True, + "ca_certificate": "ca_certificate_value", + "ca_certificate_set": True, + }, + }, + "no_connectivity": {}, + "static_service_ip_connectivity": {}, + "forward_ssh_connectivity": { + "hostname": "hostname_value", + "username": "username_value", + "port": 453, + "password": "password_value", + "private_key": "private_key_value", + }, + "private_connectivity": { + "private_connection_name": "private_connection_name_value" + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_connection_profile(request) + + +def test_update_connection_profile_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "connection_profile": { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + connection_profile=datastream_resources.ConnectionProfile( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_connection_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{connection_profile.name=projects/*/locations/*/connectionProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_update_connection_profile_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_connection_profile( + datastream.UpdateConnectionProfileRequest(), + connection_profile=datastream_resources.ConnectionProfile( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.DeleteConnectionProfileRequest, + dict, + ], +) +def test_delete_connection_profile_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_connection_profile(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_connection_profile_rest_required_fields( + request_type=datastream.DeleteConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_connection_profile._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_connection_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_delete_connection_profile" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.DeleteConnectionProfileRequest.pb( + datastream.DeleteConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.DeleteConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.DeleteConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_connection_profile(request) + + +def test_delete_connection_profile_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/connectionProfiles/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_connection_profile(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/connectionProfiles/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_connection_profile_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_connection_profile( + datastream.DeleteConnectionProfileRequest(), + name="name_value", + ) + + +def test_delete_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.DiscoverConnectionProfileRequest, + dict, + ], +) +def test_discover_connection_profile_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.DiscoverConnectionProfileResponse( + oracle_rdbms=datastream_resources.OracleRdbms( + oracle_schemas=[ + datastream_resources.OracleSchema(schema_name="schema_name_value") + ] + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.DiscoverConnectionProfileResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.discover_connection_profile(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastream.DiscoverConnectionProfileResponse) + + +def test_discover_connection_profile_rest_required_fields( + request_type=datastream.DiscoverConnectionProfileRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).discover_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).discover_connection_profile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.DiscoverConnectionProfileResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.DiscoverConnectionProfileResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.discover_connection_profile(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_discover_connection_profile_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.discover_connection_profile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_discover_connection_profile_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_discover_connection_profile" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_discover_connection_profile" + 
) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.DiscoverConnectionProfileRequest.pb( + datastream.DiscoverConnectionProfileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + datastream.DiscoverConnectionProfileResponse.to_json( + datastream.DiscoverConnectionProfileResponse() + ) + ) + + request = datastream.DiscoverConnectionProfileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.DiscoverConnectionProfileResponse() + + client.discover_connection_profile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_discover_connection_profile_rest_bad_request( + transport: str = "rest", request_type=datastream.DiscoverConnectionProfileRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.discover_connection_profile(request) + + +def test_discover_connection_profile_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListStreamsRequest, + dict, + ], +) +def test_list_streams_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_streams(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListStreamsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_streams_rest_required_fields(request_type=datastream.ListStreamsRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_streams._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_streams._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_streams(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_streams_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_streams._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_streams_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.DatastreamRestInterceptor, "post_list_streams" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_streams" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListStreamsRequest.pb(datastream.ListStreamsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListStreamsResponse.to_json( + datastream.ListStreamsResponse() + ) + + request = datastream.ListStreamsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListStreamsResponse() + + client.list_streams( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_streams_rest_bad_request( + transport: str = "rest", request_type=datastream.ListStreamsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_streams(request) + + +def test_list_streams_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListStreamsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListStreamsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_streams(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/streams" + % client.transport._host, + args[1], + ) + + +def test_list_streams_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_streams( + datastream.ListStreamsRequest(), + parent="parent_value", + ) + + +def test_list_streams_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListStreamsResponse( + streams=[ + datastream_resources.Stream(), + datastream_resources.Stream(), + datastream_resources.Stream(), + ], + next_page_token="abc", + ), + datastream.ListStreamsResponse( + streams=[], + next_page_token="def", + ), + datastream.ListStreamsResponse( + streams=[ + datastream_resources.Stream(), + ], + next_page_token="ghi", + ), + datastream.ListStreamsResponse( + streams=[ + datastream_resources.Stream(), + datastream_resources.Stream(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(datastream.ListStreamsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_streams(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datastream_resources.Stream) for i in results) + + pages = list(client.list_streams(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize( + "request_type", + [ + datastream.GetStreamRequest, + dict, + ], +) +def test_get_stream_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Stream( + name="name_value", + display_name="display_name_value", + state=datastream_resources.Stream.State.CREATED, + backfill_all=datastream_resources.Stream.BackfillAllStrategy( + oracle_excluded_objects=datastream_resources.OracleRdbms( + oracle_schemas=[ + datastream_resources.OracleSchema( + schema_name="schema_name_value" + ) + ] + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.Stream.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_stream(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datastream_resources.Stream) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == datastream_resources.Stream.State.CREATED + + +def test_get_stream_rest_required_fields(request_type=datastream.GetStreamRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Stream() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.Stream.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_stream(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_stream_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_stream._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stream_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_stream" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_stream" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetStreamRequest.pb(datastream.GetStreamRequest()) + transcode.return_value = { 
+ "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.Stream.to_json( + datastream_resources.Stream() + ) + + request = datastream.GetStreamRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.Stream() + + client.get_stream( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stream_rest_bad_request( + transport: str = "rest", request_type=datastream.GetStreamRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stream(request) + + +def test_get_stream_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.Stream() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/streams/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.Stream.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_stream(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/streams/*}" + % client.transport._host, + args[1], + ) + + +def test_get_stream_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_stream( + datastream.GetStreamRequest(), + name="name_value", + ) + + +def test_get_stream_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.CreateStreamRequest, + dict, + ], +) +def test_create_stream_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["stream"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "source_config": { + "source_connection_profile_name": "source_connection_profile_name_value", + "oracle_source_config": { + "allowlist": { + "oracle_schemas": [ + { + "schema_name": "schema_name_value", + "oracle_tables": [ + { + "table_name": "table_name_value", + "oracle_columns": [ + { + "column_name": "column_name_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "encoding": "encoding_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "rejectlist": {}, + }, + "mysql_source_config": { + "allowlist": { + "mysql_databases": [ + { + "database_name": "database_name_value", + "mysql_tables": [ + { + "table_name": "table_name_value", + "mysql_columns": [ + { + "column_name": "column_name_value", + "data_type": "data_type_value", + "length": 642, + "collation": "collation_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "rejectlist": {}, + }, + }, + "destination_config": { + "destination_connection_profile_name": "destination_connection_profile_name_value", + "gcs_destination_config": { + "path": "path_value", 
+ "gcs_file_format": 1, + "file_rotation_mb": 1693, + "file_rotation_interval": {"seconds": 751, "nanos": 543}, + "avro_file_format": {}, + "json_file_format": {"schema_file_format": 1, "compression": 1}, + }, + }, + "state": 1, + "backfill_all": {"oracle_excluded_objects": {}, "mysql_excluded_objects": {}}, + "backfill_none": {}, + "errors": [ + { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_stream(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_stream_rest_required_fields( + request_type=datastream.CreateStreamRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["stream_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "streamId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "streamId" in jsonified_request + assert jsonified_request["streamId"] == request_init["stream_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["streamId"] = "stream_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_stream._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + "stream_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "streamId" in jsonified_request + assert jsonified_request["streamId"] == "stream_id_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_stream(request) + + expected_params = [ + ( + "streamId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_stream_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_stream._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "streamId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "streamId", + "stream", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_stream_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = 
DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_stream" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_create_stream" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.CreateStreamRequest.pb(datastream.CreateStreamRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.CreateStreamRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_stream( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_stream_rest_bad_request( + transport: str = "rest", request_type=datastream.CreateStreamRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["stream"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "source_config": { + "source_connection_profile_name": "source_connection_profile_name_value", + "oracle_source_config": { + "allowlist": { + "oracle_schemas": [ + { + "schema_name": 
"schema_name_value", + "oracle_tables": [ + { + "table_name": "table_name_value", + "oracle_columns": [ + { + "column_name": "column_name_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "encoding": "encoding_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "rejectlist": {}, + }, + "mysql_source_config": { + "allowlist": { + "mysql_databases": [ + { + "database_name": "database_name_value", + "mysql_tables": [ + { + "table_name": "table_name_value", + "mysql_columns": [ + { + "column_name": "column_name_value", + "data_type": "data_type_value", + "length": 642, + "collation": "collation_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "rejectlist": {}, + }, + }, + "destination_config": { + "destination_connection_profile_name": "destination_connection_profile_name_value", + "gcs_destination_config": { + "path": "path_value", + "gcs_file_format": 1, + "file_rotation_mb": 1693, + "file_rotation_interval": {"seconds": 751, "nanos": 543}, + "avro_file_format": {}, + "json_file_format": {"schema_file_format": 1, "compression": 1}, + }, + }, + "state": 1, + "backfill_all": {"oracle_excluded_objects": {}, "mysql_excluded_objects": {}}, + "backfill_none": {}, + "errors": [ + { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_stream(request) + + +def test_create_stream_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + stream=datastream_resources.Stream(name="name_value"), + stream_id="stream_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_stream(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/streams" + % client.transport._host, + args[1], + ) + + +def test_create_stream_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_stream( + datastream.CreateStreamRequest(), + parent="parent_value", + stream=datastream_resources.Stream(name="name_value"), + stream_id="stream_id_value", + ) + + +def test_create_stream_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.UpdateStreamRequest, + dict, + ], +) +def test_update_stream_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "stream": {"name": "projects/sample1/locations/sample2/streams/sample3"} + } + request_init["stream"] = { + "name": "projects/sample1/locations/sample2/streams/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "source_config": { + "source_connection_profile_name": "source_connection_profile_name_value", + "oracle_source_config": { + "allowlist": { + "oracle_schemas": [ + { + "schema_name": "schema_name_value", + "oracle_tables": [ + { + "table_name": "table_name_value", + "oracle_columns": [ + { + "column_name": "column_name_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "encoding": "encoding_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "rejectlist": {}, + }, + "mysql_source_config": { + "allowlist": { + "mysql_databases": [ + { + "database_name": "database_name_value", + "mysql_tables": [ + { + "table_name": "table_name_value", + "mysql_columns": [ + { + "column_name": "column_name_value", + "data_type": "data_type_value", + "length": 642, + "collation": "collation_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "rejectlist": {}, + 
}, + }, + "destination_config": { + "destination_connection_profile_name": "destination_connection_profile_name_value", + "gcs_destination_config": { + "path": "path_value", + "gcs_file_format": 1, + "file_rotation_mb": 1693, + "file_rotation_interval": {"seconds": 751, "nanos": 543}, + "avro_file_format": {}, + "json_file_format": {"schema_file_format": 1, "compression": 1}, + }, + }, + "state": 1, + "backfill_all": {"oracle_excluded_objects": {}, "mysql_excluded_objects": {}}, + "backfill_none": {}, + "errors": [ + { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_stream(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_stream_rest_required_fields( + request_type=datastream.UpdateStreamRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_stream._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_stream(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_stream_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_stream._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set(("stream",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_stream_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_update_stream" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_update_stream" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.UpdateStreamRequest.pb(datastream.UpdateStreamRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.UpdateStreamRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_stream( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_stream_rest_bad_request( + transport: str = "rest", request_type=datastream.UpdateStreamRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "stream": {"name": "projects/sample1/locations/sample2/streams/sample3"} + } + request_init["stream"] = { + "name": "projects/sample1/locations/sample2/streams/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "source_config": { + "source_connection_profile_name": "source_connection_profile_name_value", + "oracle_source_config": { + "allowlist": { + "oracle_schemas": [ + { + "schema_name": "schema_name_value", + "oracle_tables": [ + { + "table_name": "table_name_value", + "oracle_columns": [ + { + "column_name": "column_name_value", + "data_type": "data_type_value", + "length": 642, + "precision": 972, + "scale": 520, + "encoding": "encoding_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "rejectlist": {}, + }, + "mysql_source_config": { + "allowlist": { + "mysql_databases": [ + { + "database_name": "database_name_value", + "mysql_tables": [ + { + "table_name": "table_name_value", + 
"mysql_columns": [ + { + "column_name": "column_name_value", + "data_type": "data_type_value", + "length": 642, + "collation": "collation_value", + "primary_key": True, + "nullable": True, + "ordinal_position": 1725, + } + ], + } + ], + } + ] + }, + "rejectlist": {}, + }, + }, + "destination_config": { + "destination_connection_profile_name": "destination_connection_profile_name_value", + "gcs_destination_config": { + "path": "path_value", + "gcs_file_format": 1, + "file_rotation_mb": 1693, + "file_rotation_interval": {"seconds": 751, "nanos": 543}, + "avro_file_format": {}, + "json_file_format": {"schema_file_format": 1, "compression": 1}, + }, + }, + "state": 1, + "backfill_all": {"oracle_excluded_objects": {}, "mysql_excluded_objects": {}}, + "backfill_none": {}, + "errors": [ + { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_stream(request) + + +def test_update_stream_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "stream": {"name": "projects/sample1/locations/sample2/streams/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + stream=datastream_resources.Stream(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_stream(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{stream.name=projects/*/locations/*/streams/*}" + % client.transport._host, + args[1], + ) + + +def test_update_stream_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_stream( + datastream.UpdateStreamRequest(), + stream=datastream_resources.Stream(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_stream_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.DeleteStreamRequest, + dict, + ], +) +def test_delete_stream_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_stream(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_stream_rest_required_fields( + request_type=datastream.DeleteStreamRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_stream._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_stream._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_stream(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_stream_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_stream._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_stream_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_stream" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_delete_stream" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
datastream.DeleteStreamRequest.pb(datastream.DeleteStreamRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.DeleteStreamRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_stream( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_stream_rest_bad_request( + transport: str = "rest", request_type=datastream.DeleteStreamRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_stream(request) + + +def test_delete_stream_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/streams/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_stream(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/streams/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_stream_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stream( + datastream.DeleteStreamRequest(), + name="name_value", + ) + + +def test_delete_stream_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.FetchErrorsRequest, + dict, + ], +) +def test_fetch_errors_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"stream": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_errors(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_errors_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_fetch_errors" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_fetch_errors" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.FetchErrorsRequest.pb(datastream.FetchErrorsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.FetchErrorsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.fetch_errors( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_errors_rest_bad_request( + transport: str = "rest", request_type=datastream.FetchErrorsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"stream": "projects/sample1/locations/sample2/streams/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_errors(request) + + +def test_fetch_errors_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.FetchStaticIpsRequest, + dict, + ], +) +def test_fetch_static_ips_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream.FetchStaticIpsResponse( + static_ips=["static_ips_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.FetchStaticIpsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_static_ips(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchStaticIpsPager) + assert response.static_ips == ["static_ips_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_static_ips_rest_required_fields( + request_type=datastream.FetchStaticIpsRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_static_ips._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_static_ips._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.FetchStaticIpsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.FetchStaticIpsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_static_ips(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_static_ips_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_static_ips._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_static_ips_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_fetch_static_ips" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_fetch_static_ips" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.FetchStaticIpsRequest.pb( + datastream.FetchStaticIpsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.FetchStaticIpsResponse.to_json( + datastream.FetchStaticIpsResponse() + ) + + request = datastream.FetchStaticIpsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.FetchStaticIpsResponse() + + client.fetch_static_ips( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_static_ips_rest_bad_request( + transport: str = "rest", request_type=datastream.FetchStaticIpsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_static_ips(request) + + +def test_fetch_static_ips_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream.FetchStaticIpsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.FetchStaticIpsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_static_ips(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*}:fetchStaticIps" + % client.transport._host, + args[1], + ) + + +def test_fetch_static_ips_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_static_ips( + datastream.FetchStaticIpsRequest(), + name="name_value", + ) + + +def test_fetch_static_ips_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + datastream.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + datastream.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + datastream.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(datastream.FetchStaticIpsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"name": "projects/sample1/locations/sample2"} + + pager = client.fetch_static_ips(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + pages = list(client.fetch_static_ips(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.CreatePrivateConnectionRequest, + dict, + ], +) +def test_create_private_connection_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["private_connection"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "state": 1, + "error": { + "reason": "reason_value", + 
"error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + }, + "vpc_peering_config": {"vpc_name": "vpc_name_value", "subnet": "subnet_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_private_connection(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_private_connection_rest_required_fields( + request_type=datastream.CreatePrivateConnectionRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["private_connection_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "privateConnectionId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_private_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "privateConnectionId" in jsonified_request + assert ( + jsonified_request["privateConnectionId"] + == request_init["private_connection_id"] + 
) + + jsonified_request["parent"] = "parent_value" + jsonified_request["privateConnectionId"] = "private_connection_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_private_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "private_connection_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "privateConnectionId" in jsonified_request + assert jsonified_request["privateConnectionId"] == "private_connection_id_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_private_connection(request) + + expected_params = [ + ( + "privateConnectionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_private_connection_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_private_connection._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "privateConnectionId", + "requestId", + ) + ) + & set( + ( + "parent", + "privateConnectionId", + "privateConnection", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_private_connection_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_private_connection" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_create_private_connection" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = datastream.CreatePrivateConnectionRequest.pb( + datastream.CreatePrivateConnectionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.CreatePrivateConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_private_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_private_connection_rest_bad_request( + transport: str = "rest", request_type=datastream.CreatePrivateConnectionRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["private_connection"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "state": 1, + "error": { + "reason": "reason_value", + "error_uuid": "error_uuid_value", + "message": "message_value", + "error_time": {}, + "details": {}, + }, + "vpc_peering_config": {"vpc_name": "vpc_name_value", "subnet": "subnet_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_private_connection(request) + + +def test_create_private_connection_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + private_connection=datastream_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_private_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/privateConnections" + % client.transport._host, + args[1], + ) + + +def test_create_private_connection_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_private_connection( + datastream.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=datastream_resources.PrivateConnection( + name="name_value" + ), + private_connection_id="private_connection_id_value", + ) + + +def test_create_private_connection_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.GetPrivateConnectionRequest, + dict, + ], +) +def test_get_private_connection_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.PrivateConnection( + name="name_value", + display_name="display_name_value", + state=datastream_resources.PrivateConnection.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.PrivateConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_private_connection(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastream_resources.PrivateConnection) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == datastream_resources.PrivateConnection.State.CREATING + + +def test_get_private_connection_rest_required_fields( + request_type=datastream.GetPrivateConnectionRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_private_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_private_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert 
"name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.PrivateConnection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.PrivateConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_private_connection(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_private_connection_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_private_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_private_connection_rest_interceptors(null_interceptor): + transport = 
transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_private_connection" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_private_connection" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetPrivateConnectionRequest.pb( + datastream.GetPrivateConnectionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.PrivateConnection.to_json( + datastream_resources.PrivateConnection() + ) + + request = datastream.GetPrivateConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.PrivateConnection() + + client.get_private_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_private_connection_rest_bad_request( + transport: str = "rest", request_type=datastream.GetPrivateConnectionRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_private_connection(request) + + +def test_get_private_connection_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.PrivateConnection() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.PrivateConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_private_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/privateConnections/*}" + % client.transport._host, + args[1], + ) + + +def test_get_private_connection_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_private_connection( + datastream.GetPrivateConnectionRequest(), + name="name_value", + ) + + +def test_get_private_connection_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListPrivateConnectionsRequest, + dict, + ], +) +def test_list_private_connections_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream.ListPrivateConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListPrivateConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_private_connections(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_private_connections_rest_required_fields( + request_type=datastream.ListPrivateConnectionsRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_private_connections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_private_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListPrivateConnectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListPrivateConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_private_connections(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_private_connections_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_private_connections._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_private_connections_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_private_connections" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_private_connections" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListPrivateConnectionsRequest.pb( + datastream.ListPrivateConnectionsRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListPrivateConnectionsResponse.to_json( + datastream.ListPrivateConnectionsResponse() + ) + + request = datastream.ListPrivateConnectionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListPrivateConnectionsResponse() + + client.list_private_connections( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_private_connections_rest_bad_request( + transport: str = "rest", request_type=datastream.ListPrivateConnectionsRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_private_connections(request) + + +def test_list_private_connections_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream.ListPrivateConnectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListPrivateConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_private_connections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*}/privateConnections" + % client.transport._host, + args[1], + ) + + +def test_list_private_connections_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_private_connections( + datastream.ListPrivateConnectionsRequest(), + parent="parent_value", + ) + + +def test_list_private_connections_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListPrivateConnectionsResponse( + private_connections=[ + datastream_resources.PrivateConnection(), + datastream_resources.PrivateConnection(), + datastream_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + datastream.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + datastream.ListPrivateConnectionsResponse( + private_connections=[ + datastream_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + datastream.ListPrivateConnectionsResponse( + private_connections=[ + datastream_resources.PrivateConnection(), + datastream_resources.PrivateConnection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datastream.ListPrivateConnectionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_private_connections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, datastream_resources.PrivateConnection) for i in results + ) + + pages = list(client.list_private_connections(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.DeletePrivateConnectionRequest, + dict, + ], +) +def test_delete_private_connection_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request 
that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_private_connection(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_private_connection_rest_required_fields( + request_type=datastream.DeletePrivateConnectionRequest, +): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_private_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_private_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_private_connection(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_private_connection_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_private_connection._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_private_connection_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_private_connection" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_delete_private_connection" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.DeletePrivateConnectionRequest.pb( + datastream.DeletePrivateConnectionRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.DeletePrivateConnectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_private_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_private_connection_rest_bad_request( + transport: str = "rest", request_type=datastream.DeletePrivateConnectionRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_private_connection(request) + + +def test_delete_private_connection_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_private_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/privateConnections/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_private_connection_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_private_connection( + datastream.DeletePrivateConnectionRequest(), + name="name_value", + ) + + +def test_delete_private_connection_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.CreateRouteRequest, + dict, + ], +) +def test_create_route_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request_init["route"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "destination_address": "destination_address_value", + "destination_port": 1734, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_route(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_route_rest_required_fields(request_type=datastream.CreateRouteRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["route_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "routeId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_route._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "routeId" in jsonified_request + assert jsonified_request["routeId"] == request_init["route_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["routeId"] = "route_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_route._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "route_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "routeId" in jsonified_request + assert jsonified_request["routeId"] == "route_id_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_route(request) + + expected_params = [ + ( + "routeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_route_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_route._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "routeId", + ) + ) + & set( + ( + "parent", + "routeId", + "route", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_route_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_route" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_create_route" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.CreateRouteRequest.pb(datastream.CreateRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.CreateRouteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_route( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_route_rest_bad_request( + transport: str = "rest", request_type=datastream.CreateRouteRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request_init["route"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "display_name": "display_name_value", + "destination_address": "destination_address_value", + "destination_port": 1734, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_route(request) + + +def test_create_route_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + route=datastream_resources.Route(name="name_value"), + route_id="route_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_route(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/privateConnections/*}/routes" + % client.transport._host, + args[1], + ) + + +def test_create_route_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_route( + datastream.CreateRouteRequest(), + parent="parent_value", + route=datastream_resources.Route(name="name_value"), + route_id="route_id_value", + ) + + +def test_create_route_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.GetRouteRequest, + dict, + ], +) +def test_get_route_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datastream_resources.Route( + name="name_value", + display_name="display_name_value", + destination_address="destination_address_value", + destination_port=1734, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_route(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datastream_resources.Route) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.destination_address == "destination_address_value" + assert response.destination_port == 1734 + + +def test_get_route_rest_required_fields(request_type=datastream.GetRouteRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_route._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_route._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] 
== "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Route() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream_resources.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_route(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_route_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_route._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_route_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_route" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_get_route" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.GetRouteRequest.pb(datastream.GetRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream_resources.Route.to_json( + datastream_resources.Route() + ) + + request = datastream.GetRouteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream_resources.Route() + + client.get_route( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_route_rest_bad_request( + transport: str = "rest", request_type=datastream.GetRouteRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_route(request) + + +def test_get_route_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream_resources.Route() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream_resources.Route.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_route(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/privateConnections/*/routes/*}" + % client.transport._host, + args[1], + ) + + +def test_get_route_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_route( + datastream.GetRouteRequest(), + name="name_value", + ) + + +def test_get_route_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datastream.ListRoutesRequest, + dict, + ], +) +def test_list_routes_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListRoutesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListRoutesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_routes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRoutesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_routes_rest_required_fields(request_type=datastream.ListRoutesRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_routes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_routes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datastream.ListRoutesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datastream.ListRoutesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_routes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_routes_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_routes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_routes_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.DatastreamRestInterceptor, "post_list_routes" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_list_routes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datastream.ListRoutesRequest.pb(datastream.ListRoutesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datastream.ListRoutesResponse.to_json( + datastream.ListRoutesResponse() + ) + + request = datastream.ListRoutesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datastream.ListRoutesResponse() + + client.list_routes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_routes_rest_bad_request( + transport: str = "rest", request_type=datastream.ListRoutesRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_routes(request) + + +def test_list_routes_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datastream.ListRoutesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datastream.ListRoutesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_routes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{parent=projects/*/locations/*/privateConnections/*}/routes" + % client.transport._host, + args[1], + ) + + +def test_list_routes_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_routes( + datastream.ListRoutesRequest(), + parent="parent_value", + ) + + +def test_list_routes_rest_pager(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datastream.ListRoutesResponse( + routes=[ + datastream_resources.Route(), + datastream_resources.Route(), + datastream_resources.Route(), + ], + next_page_token="abc", + ), + datastream.ListRoutesResponse( + routes=[], + next_page_token="def", + ), + datastream.ListRoutesResponse( + routes=[ + datastream_resources.Route(), + ], + next_page_token="ghi", + ), + datastream.ListRoutesResponse( + routes=[ + datastream_resources.Route(), + datastream_resources.Route(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(datastream.ListRoutesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/privateConnections/sample3" + } + + pager = client.list_routes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datastream_resources.Route) for i in results) + + pages = list(client.list_routes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + 
+@pytest.mark.parametrize( + "request_type", + [ + datastream.DeleteRouteRequest, + dict, + ], +) +def test_delete_route_rest(request_type): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_route(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_route_rest_required_fields(request_type=datastream.DeleteRouteRequest): + transport_class = transports.DatastreamRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_route._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_route._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_route(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_route_rest_unset_required_fields(): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_route._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_route_rest_interceptors(null_interceptor): + transport = transports.DatastreamRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DatastreamRestInterceptor(), + ) + client = DatastreamClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_route" + ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "pre_delete_route" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
datastream.DeleteRouteRequest.pb(datastream.DeleteRouteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = datastream.DeleteRouteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_route( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_route_rest_bad_request( + transport: str = "rest", request_type=datastream.DeleteRouteRequest +): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_route(request) + + +def test_delete_route_rest_flattened(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/privateConnections/sample3/routes/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_route(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha1/{name=projects/*/locations/*/privateConnections/*/routes/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_route_rest_flattened_error(transport: str = "rest"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_route( + datastream.DeleteRouteRequest(), + name="name_value", + ) + + +def test_delete_route_rest_error(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastreamClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastreamClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastreamClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatastreamClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DatastreamClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DatastreamGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatastreamGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatastreamGrpcTransport, + transports.DatastreamGrpcAsyncIOTransport, + transports.DatastreamRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DatastreamClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatastreamGrpcTransport, + ) + + +def test_datastream_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DatastreamTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_datastream_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.datastream_v1alpha1.services.datastream.transports.DatastreamTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DatastreamTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( "list_connection_profiles", "get_connection_profile", "create_connection_profile", @@ -6859,6 +13525,7 @@ def test_datastream_transport_auth_adc(transport_class): [ transports.DatastreamGrpcTransport, transports.DatastreamGrpcAsyncIOTransport, + transports.DatastreamRestTransport, ], ) def test_datastream_transport_auth_gdch_credentials(transport_class): @@ -6953,11 +13620,40 @@ def test_datastream_grpc_transport_client_cert_source_for_mtls(transport_class): ) +def test_datastream_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DatastreamRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_datastream_rest_lro_client(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_datastream_host_no_port(transport_name): @@ -6968,7 +13664,11 @@ def test_datastream_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("datastream.googleapis.com:443") + assert client.transport._host == ( + "datastream.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastream.googleapis.com" + ) @pytest.mark.parametrize( @@ -6976,6 +13676,7 @@ def test_datastream_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_datastream_host_with_port(transport_name): @@ -6986,7 +13687,93 @@ def test_datastream_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("datastream.googleapis.com:8000") + assert client.transport._host == ( + "datastream.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://datastream.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_datastream_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DatastreamClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DatastreamClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_connection_profiles._session + session2 = client2.transport.list_connection_profiles._session + assert session1 != session2 + session1 = client1.transport.get_connection_profile._session + session2 = client2.transport.get_connection_profile._session + assert session1 != session2 + session1 = client1.transport.create_connection_profile._session + session2 = client2.transport.create_connection_profile._session + assert session1 != session2 + session1 = 
client1.transport.update_connection_profile._session + session2 = client2.transport.update_connection_profile._session + assert session1 != session2 + session1 = client1.transport.delete_connection_profile._session + session2 = client2.transport.delete_connection_profile._session + assert session1 != session2 + session1 = client1.transport.discover_connection_profile._session + session2 = client2.transport.discover_connection_profile._session + assert session1 != session2 + session1 = client1.transport.list_streams._session + session2 = client2.transport.list_streams._session + assert session1 != session2 + session1 = client1.transport.get_stream._session + session2 = client2.transport.get_stream._session + assert session1 != session2 + session1 = client1.transport.create_stream._session + session2 = client2.transport.create_stream._session + assert session1 != session2 + session1 = client1.transport.update_stream._session + session2 = client2.transport.update_stream._session + assert session1 != session2 + session1 = client1.transport.delete_stream._session + session2 = client2.transport.delete_stream._session + assert session1 != session2 + session1 = client1.transport.fetch_errors._session + session2 = client2.transport.fetch_errors._session + assert session1 != session2 + session1 = client1.transport.fetch_static_ips._session + session2 = client2.transport.fetch_static_ips._session + assert session1 != session2 + session1 = client1.transport.create_private_connection._session + session2 = client2.transport.create_private_connection._session + assert session1 != session2 + session1 = client1.transport.get_private_connection._session + session2 = client2.transport.get_private_connection._session + assert session1 != session2 + session1 = client1.transport.list_private_connections._session + session2 = client2.transport.list_private_connections._session + assert session1 != session2 + session1 = client1.transport.delete_private_connection._session + session2 = 
client2.transport.delete_private_connection._session + assert session1 != session2 + session1 = client1.transport.create_route._session + session2 = client2.transport.create_route._session + assert session1 != session2 + session1 = client1.transport.get_route._session + session2 = client2.transport.get_route._session + assert session1 != session2 + session1 = client1.transport.list_routes._session + session2 = client2.transport.list_routes._session + assert session1 != session2 + session1 = client1.transport.delete_route._session + session2 = client2.transport.delete_route._session + assert session1 != session2 def test_datastream_grpc_transport_channel(): @@ -7394,6 +14181,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -7411,6 +14199,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: