diff --git a/packages/google-cloud-config/google/cloud/config/__init__.py b/packages/google-cloud-config/google/cloud/config/__init__.py index b86f5ddc9ec0..ebea67cebc62 100644 --- a/packages/google-cloud-config/google/cloud/config/__init__.py +++ b/packages/google-cloud-config/google/cloud/config/__init__.py @@ -23,20 +23,27 @@ from google.cloud.config_v1.types.config import ( ApplyResults, CreateDeploymentRequest, + CreatePreviewRequest, DeleteDeploymentRequest, + DeletePreviewRequest, DeleteStatefileRequest, Deployment, DeploymentOperationMetadata, ExportDeploymentStatefileRequest, ExportLockInfoRequest, + ExportPreviewResultRequest, + ExportPreviewResultResponse, ExportRevisionStatefileRequest, GetDeploymentRequest, + GetPreviewRequest, GetResourceRequest, GetRevisionRequest, GitSource, ImportStatefileRequest, ListDeploymentsRequest, ListDeploymentsResponse, + ListPreviewsRequest, + ListPreviewsResponse, ListResourcesRequest, ListResourcesResponse, ListRevisionsRequest, @@ -44,6 +51,10 @@ LockDeploymentRequest, LockInfo, OperationMetadata, + Preview, + PreviewArtifacts, + PreviewOperationMetadata, + PreviewResult, Resource, ResourceCAIInfo, ResourceTerraformInfo, @@ -62,20 +73,27 @@ "ConfigAsyncClient", "ApplyResults", "CreateDeploymentRequest", + "CreatePreviewRequest", "DeleteDeploymentRequest", + "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", "DeploymentOperationMetadata", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", + "ExportPreviewResultRequest", + "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", "GetDeploymentRequest", + "GetPreviewRequest", "GetResourceRequest", "GetRevisionRequest", "GitSource", "ImportStatefileRequest", "ListDeploymentsRequest", "ListDeploymentsResponse", + "ListPreviewsRequest", + "ListPreviewsResponse", "ListResourcesRequest", "ListResourcesResponse", "ListRevisionsRequest", @@ -83,6 +101,10 @@ "LockDeploymentRequest", "LockInfo", "OperationMetadata", + "Preview", + "PreviewArtifacts", + "PreviewOperationMetadata", + "PreviewResult", "Resource", "ResourceCAIInfo", "ResourceTerraformInfo", diff --git a/packages/google-cloud-config/google/cloud/config/gapic_version.py b/packages/google-cloud-config/google/cloud/config/gapic_version.py index cf99f3acb1ee..360a0d13ebdd 100644 --- a/packages/google-cloud-config/google/cloud/config/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/__init__.py index a407a3aa0ff8..0dd38e23e39f 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/__init__.py +++ b/packages/google-cloud-config/google/cloud/config_v1/__init__.py @@ -22,20 +22,27 @@ from .types.config import ( ApplyResults, CreateDeploymentRequest, + CreatePreviewRequest, DeleteDeploymentRequest, + DeletePreviewRequest, DeleteStatefileRequest, Deployment, DeploymentOperationMetadata, ExportDeploymentStatefileRequest, ExportLockInfoRequest, + ExportPreviewResultRequest, + ExportPreviewResultResponse, ExportRevisionStatefileRequest, GetDeploymentRequest, + GetPreviewRequest, GetResourceRequest, GetRevisionRequest, GitSource, ImportStatefileRequest, ListDeploymentsRequest, ListDeploymentsResponse, + ListPreviewsRequest, + ListPreviewsResponse, ListResourcesRequest, ListResourcesResponse, ListRevisionsRequest, @@ -43,6 +50,10 @@ LockDeploymentRequest, LockInfo, OperationMetadata, + Preview, + PreviewArtifacts, + PreviewOperationMetadata, + PreviewResult, Resource, ResourceCAIInfo, ResourceTerraformInfo, @@ -61,20 +72,27 @@ "ApplyResults", "ConfigClient", "CreateDeploymentRequest", + "CreatePreviewRequest", "DeleteDeploymentRequest", + "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", "DeploymentOperationMetadata", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", + "ExportPreviewResultRequest", + "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", "GetDeploymentRequest", + "GetPreviewRequest", "GetResourceRequest", "GetRevisionRequest", "GitSource", "ImportStatefileRequest", "ListDeploymentsRequest", "ListDeploymentsResponse", + "ListPreviewsRequest", + "ListPreviewsResponse", "ListResourcesRequest", "ListResourcesResponse", "ListRevisionsRequest", @@ -82,6 +100,10 @@ "LockDeploymentRequest", "LockInfo", "OperationMetadata", + "Preview", + "PreviewArtifacts", + "PreviewOperationMetadata", + "PreviewResult", "Resource", "ResourceCAIInfo", "ResourceTerraformInfo", diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json index 65de7b78a0aa..190bb0825b1c 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json @@ -15,11 +15,21 @@ "create_deployment" ] }, + "CreatePreview": { + "methods": [ + "create_preview" + ] + }, "DeleteDeployment": { "methods": [ "delete_deployment" ] }, + "DeletePreview": { + "methods": [ + "delete_preview" + ] + }, "DeleteStatefile": { "methods": [ "delete_statefile" @@ -35,6 +45,11 @@ "export_lock_info" ] }, + "ExportPreviewResult": { + "methods": [ + "export_preview_result" + ] + }, "ExportRevisionStatefile": { "methods": [ "export_revision_statefile" @@ -45,6 +60,11 @@ "get_deployment" ] }, + "GetPreview": { + "methods": [ + "get_preview" + ] + }, "GetResource": { "methods": [ "get_resource" @@ -65,6 +85,11 @@ "list_deployments" ] }, + "ListPreviews": { + "methods": [ + "list_previews" + ] + }, "ListResources": { "methods": [ "list_resources" @@ -100,11 +125,21 @@ "create_deployment" ] }, + "CreatePreview": { + "methods": [ + "create_preview" + ] + }, "DeleteDeployment": { "methods": [ "delete_deployment" ] }, + "DeletePreview": { + "methods": [ + "delete_preview" + ] 
+ }, "DeleteStatefile": { "methods": [ "delete_statefile" @@ -120,6 +155,11 @@ "export_lock_info" ] }, + "ExportPreviewResult": { + "methods": [ + "export_preview_result" + ] + }, "ExportRevisionStatefile": { "methods": [ "export_revision_statefile" @@ -130,6 +170,11 @@ "get_deployment" ] }, + "GetPreview": { + "methods": [ + "get_preview" + ] + }, "GetResource": { "methods": [ "get_resource" @@ -150,6 +195,11 @@ "list_deployments" ] }, + "ListPreviews": { + "methods": [ + "list_previews" + ] + }, "ListResources": { "methods": [ "list_resources" @@ -185,11 +235,21 @@ "create_deployment" ] }, + "CreatePreview": { + "methods": [ + "create_preview" + ] + }, "DeleteDeployment": { "methods": [ "delete_deployment" ] }, + "DeletePreview": { + "methods": [ + "delete_preview" + ] + }, "DeleteStatefile": { "methods": [ "delete_statefile" @@ -205,6 +265,11 @@ "export_lock_info" ] }, + "ExportPreviewResult": { + "methods": [ + "export_preview_result" + ] + }, "ExportRevisionStatefile": { "methods": [ "export_revision_statefile" @@ -215,6 +280,11 @@ "get_deployment" ] }, + "GetPreview": { + "methods": [ + "get_preview" + ] + }, "GetResource": { "methods": [ "get_resource" @@ -235,6 +305,11 @@ "list_deployments" ] }, + "ListPreviews": { + "methods": [ + "list_previews" + ] + }, "ListResources": { "methods": [ "list_resources" diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py index cf99f3acb1ee..360a0d13ebdd 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py index 8f6ab9857596..e6752efa8a85 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py @@ -50,6 +50,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.cloud.config_v1.services.config import pagers from google.cloud.config_v1.types import config @@ -72,6 +73,8 @@ class ConfigAsyncClient: deployment_path = staticmethod(ConfigClient.deployment_path) parse_deployment_path = staticmethod(ConfigClient.parse_deployment_path) + preview_path = staticmethod(ConfigClient.preview_path) + parse_preview_path = staticmethod(ConfigClient.parse_preview_path) resource_path = staticmethod(ConfigClient.resource_path) parse_resource_path = staticmethod(ConfigClient.parse_resource_path) revision_path = staticmethod(ConfigClient.revision_path) @@ -1997,6 +2000,553 @@ async def sample_export_lock_info(): # Done; return the response. 
return response + async def create_preview( + self, + request: Optional[Union[config.CreatePreviewRequest, dict]] = None, + *, + parent: Optional[str] = None, + preview: Optional[config.Preview] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_create_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + preview = config_v1.Preview() + preview.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreatePreviewRequest( + parent="parent_value", + preview=preview, + ) + + # Make the request + operation = client.create_preview(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.CreatePreviewRequest, dict]]): + The request object. A request to create a preview. + parent (:class:`str`): + Required. The parent in whose context the Preview is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + preview (:class:`google.cloud.config_v1.types.Preview`): + Required. [Preview][google.cloud.config.v1.Preview] + resource to be created. + + This corresponds to the ``preview`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Preview` A preview represents a set of actions Infra Manager would perform + to move the resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, preview]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.CreatePreviewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if preview is not None: + request.preview = preview + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_preview, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Preview, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_preview( + self, + request: Optional[Union[config.GetPreviewRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Preview: + r"""Gets details about a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_get_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetPreviewRequest( + name="name_value", + ) + + # Make the request + response = await client.get_preview(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.GetPreviewRequest, dict]]): + The request object. A request to get details about a + preview. + name (:class:`str`): + Required. The name of the preview. Format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Preview: + A preview represents a set of actions + Infra Manager would perform to move the + resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.GetPreviewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_preview, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_previews( + self, + request: Optional[Union[config.ListPreviewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPreviewsAsyncPager: + r"""Lists [Preview][google.cloud.config.v1.Preview]s in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_list_previews(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListPreviewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_previews(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ListPreviewsRequest, dict]]): + The request object. A request to list all previews for a + given project and location. + parent (:class:`str`): + Required. The parent in whose context the Previews are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListPreviewsAsyncPager: + A response to a ListPreviews call. Contains a list of + Previews. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = config.ListPreviewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_previews, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPreviewsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_preview( + self, + request: Optional[Union[config.DeletePreviewRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_delete_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeletePreviewRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_preview(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.DeletePreviewRequest, dict]]): + The request object. A request to delete a preview. + name (:class:`str`): + Required. The name of the Preview in the format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Preview` A preview represents a set of actions Infra Manager would perform + to move the resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = config.DeletePreviewRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_preview, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.Preview, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def export_preview_result( + self, + request: Optional[Union[config.ExportPreviewResultRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ExportPreviewResultResponse: + r"""Export [Preview][google.cloud.config.v1.Preview] results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_export_preview_result(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportPreviewResultRequest( + parent="parent_value", + ) + + # Make the request + response = await client.export_preview_result(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ExportPreviewResultRequest, dict]]): + The request object. A request to export preview results. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.ExportPreviewResultResponse: + A response to ExportPreviewResult call. Contains preview + results. + + """ + # Create or coerce a protobuf request object. + request = config.ExportPreviewResultRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_preview_result, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
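# Illustrative usage sketch, not part of the generated client code.
# Chaining the new async RPCs: create a Preview, wait for the long-running
# operation, then export its results. This assumes the export request's
# ``parent`` is the finished preview's resource name; all names are
# hypothetical.

from google.cloud import config_v1


async def preview_and_export(
    parent: str, blueprint_gcs: str
) -> config_v1.ExportPreviewResultResponse:
    client = config_v1.ConfigAsyncClient()

    preview = config_v1.Preview()
    preview.terraform_blueprint.gcs_source = blueprint_gcs

    operation = await client.create_preview(parent=parent, preview=preview)
    created = await operation.result()  # a config_v1.Preview

    # Export the plan artifacts produced by the finished preview.
    request = config_v1.ExportPreviewResultRequest(parent=created.name)
    return await client.export_preview_result(request=request)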
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py index d3ff68354028..354b94abeb93 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py @@ -54,6 +54,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.cloud.config_v1.services.config import pagers from google.cloud.config_v1.types import config @@ -211,6 +212,28 @@ def parse_deployment_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def preview_path( + project: str, + location: str, + preview: str, + ) -> str: + """Returns a fully-qualified preview string.""" + return "projects/{project}/locations/{location}/previews/{preview}".format( + project=project, + location=location, + preview=preview, + ) + + @staticmethod + def parse_preview_path(path: str) -> Dict[str, str]: + """Parses a preview path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/previews/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def resource_path( project: str, @@ -2328,6 +2351,554 @@ def sample_export_lock_info(): # Done; return the response. return response + def create_preview( + self, + request: Optional[Union[config.CreatePreviewRequest, dict]] = None, + *, + parent: Optional[str] = None, + preview: Optional[config.Preview] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_create_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + preview = config_v1.Preview() + preview.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreatePreviewRequest( + parent="parent_value", + preview=preview, + ) + + # Make the request + operation = client.create_preview(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.CreatePreviewRequest, dict]): + The request object. A request to create a preview. + parent (str): + Required. The parent in whose context the Preview is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + preview (google.cloud.config_v1.types.Preview): + Required. 
[Preview][google.cloud.config.v1.Preview] + resource to be created. + + This corresponds to the ``preview`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Preview` A preview represents a set of actions Infra Manager would perform + to move the resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, preview]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.CreatePreviewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.CreatePreviewRequest): + request = config.CreatePreviewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if preview is not None: + request.preview = preview + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_preview] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.Preview, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_preview( + self, + request: Optional[Union[config.GetPreviewRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Preview: + r"""Gets details about a [Preview][google.cloud.config.v1.Preview]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_get_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetPreviewRequest( + name="name_value", + ) + + # Make the request + response = client.get_preview(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.GetPreviewRequest, dict]): + The request object. A request to get details about a + preview. + name (str): + Required. The name of the preview. Format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.Preview: + A preview represents a set of actions + Infra Manager would perform to move the + resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.GetPreviewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.GetPreviewRequest): + request = config.GetPreviewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_preview] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_previews( + self, + request: Optional[Union[config.ListPreviewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPreviewsPager: + r"""Lists [Preview][google.cloud.config.v1.Preview]s in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_list_previews(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListPreviewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_previews(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ListPreviewsRequest, dict]): + The request object. A request to list all previews for a + given project and location. + parent (str): + Required. The parent in whose context the Previews are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.services.config.pagers.ListPreviewsPager: + A response to a ListPreviews call. Contains a list of + Previews. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.ListPreviewsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ListPreviewsRequest): + request = config.ListPreviewsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_previews] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPreviewsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_preview( + self, + request: Optional[Union[config.DeletePreviewRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a [Preview][google.cloud.config.v1.Preview]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_delete_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeletePreviewRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_preview(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.DeletePreviewRequest, dict]): + The request object. A request to delete a preview. + name (str): + Required. The name of the Preview in the format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.Preview` A preview represents a set of actions Infra Manager would perform + to move the resources towards the desired state as + specified in the configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a config.DeletePreviewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.DeletePreviewRequest): + request = config.DeletePreviewRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_preview] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.Preview, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. 
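# Illustrative usage sketch, not part of the generated client code.
# The synchronous list/delete methods combined with the ``preview_path``
# helpers added to ConfigClient above. The project/location values and the
# assumption that each Preview carries its resource ``name`` are
# illustrative.

from google.cloud import config_v1


def delete_all_previews(project: str, location: str) -> None:
    client = config_v1.ConfigClient()
    parent = f"projects/{project}/locations/{location}"

    # The pager transparently fetches additional pages while iterating.
    for preview in client.list_previews(parent=parent):
        segments = config_v1.ConfigClient.parse_preview_path(preview.name)
        print("deleting preview:", segments.get("preview", preview.name))

        operation = client.delete_preview(name=preview.name)
        operation.result()  # block until the long-running delete completes


def count_previews_per_page(parent: str) -> None:
    client = config_v1.ConfigClient()
    pager = client.list_previews(parent=parent)
    # Page-by-page iteration is also available through ``pager.pages``.
    for page in pager.pages:
        print(len(page.previews), "previews in this page")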
+ return response + + def export_preview_result( + self, + request: Optional[Union[config.ExportPreviewResultRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ExportPreviewResultResponse: + r"""Export [Preview][google.cloud.config.v1.Preview] results. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_export_preview_result(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportPreviewResultRequest( + parent="parent_value", + ) + + # Make the request + response = client.export_preview_result(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ExportPreviewResultRequest, dict]): + The request object. A request to export preview results. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.config_v1.types.ExportPreviewResultResponse: + A response to ExportPreviewResult call. Contains preview + results. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a config.ExportPreviewResultRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, config.ExportPreviewResultRequest): + request = config.ExportPreviewResultRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_preview_result] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "ConfigClient": return self diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py index d0ccc958ea38..8f74585f587d 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py @@ -409,3 +409,131 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPreviewsPager: + """A pager for iterating through ``list_previews`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListPreviewsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``previews`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListPreviews`` requests and continue to iterate + through the ``previews`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListPreviewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., config.ListPreviewsResponse], + request: config.ListPreviewsRequest, + response: config.ListPreviewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListPreviewsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListPreviewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = config.ListPreviewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[config.ListPreviewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[config.Preview]: + for page in self.pages: + yield from page.previews + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPreviewsAsyncPager: + """A pager for iterating through ``list_previews`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListPreviewsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``previews`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPreviews`` requests and continue to iterate + through the ``previews`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListPreviewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[config.ListPreviewsResponse]], + request: config.ListPreviewsRequest, + response: config.ListPreviewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListPreviewsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListPreviewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = config.ListPreviewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[config.ListPreviewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[config.Preview]: + async def async_generator(): + async for page in self.pages: + for response in page.previews: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py index a5139f21b4ce..5ca64f211d1a 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py @@ -207,6 +207,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_preview: gapic_v1.method.wrap_method( + self.create_preview, + default_timeout=None, + client_info=client_info, + ), + self.get_preview: gapic_v1.method.wrap_method( + self.get_preview, + default_timeout=None, + client_info=client_info, + ), + self.list_previews: gapic_v1.method.wrap_method( + self.list_previews, + default_timeout=None, + client_info=client_info, + ), + self.delete_preview: gapic_v1.method.wrap_method( + self.delete_preview, + default_timeout=None, + client_info=client_info, + ), + self.export_preview_result: gapic_v1.method.wrap_method( + self.export_preview_result, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -367,6 +392,53 @@ def export_lock_info( ]: raise NotImplementedError() + @property + def create_preview( + self, + ) -> Callable[ + [config.CreatePreviewRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_preview( + self, + ) -> Callable[ + [config.GetPreviewRequest], Union[config.Preview, Awaitable[config.Preview]] + ]: + raise NotImplementedError() + + @property + def list_previews( + self, + ) -> Callable[ + [config.ListPreviewsRequest], + Union[config.ListPreviewsResponse, Awaitable[config.ListPreviewsResponse]], + ]: + raise NotImplementedError() + + @property + def delete_preview( + self, + ) -> Callable[ + [config.DeletePreviewRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_preview_result( + self, + ) -> Callable[ + [config.ExportPreviewResultRequest], + Union[ + config.ExportPreviewResultResponse, + Awaitable[config.ExportPreviewResultResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py index 16b83b8ff3fa..da503d656520 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py +++ 
b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py @@ -670,6 +670,137 @@ def export_lock_info( ) return self._stubs["export_lock_info"] + @property + def create_preview( + self, + ) -> Callable[[config.CreatePreviewRequest], operations_pb2.Operation]: + r"""Return a callable for the create preview method over gRPC. + + Creates a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.CreatePreviewRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_preview" not in self._stubs: + self._stubs["create_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/CreatePreview", + request_serializer=config.CreatePreviewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_preview"] + + @property + def get_preview(self) -> Callable[[config.GetPreviewRequest], config.Preview]: + r"""Return a callable for the get preview method over gRPC. + + Gets details about a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.GetPreviewRequest], + ~.Preview]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_preview" not in self._stubs: + self._stubs["get_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetPreview", + request_serializer=config.GetPreviewRequest.serialize, + response_deserializer=config.Preview.deserialize, + ) + return self._stubs["get_preview"] + + @property + def list_previews( + self, + ) -> Callable[[config.ListPreviewsRequest], config.ListPreviewsResponse]: + r"""Return a callable for the list previews method over gRPC. + + Lists [Preview][google.cloud.config.v1.Preview]s in a given + project and location. + + Returns: + Callable[[~.ListPreviewsRequest], + ~.ListPreviewsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_previews" not in self._stubs: + self._stubs["list_previews"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListPreviews", + request_serializer=config.ListPreviewsRequest.serialize, + response_deserializer=config.ListPreviewsResponse.deserialize, + ) + return self._stubs["list_previews"] + + @property + def delete_preview( + self, + ) -> Callable[[config.DeletePreviewRequest], operations_pb2.Operation]: + r"""Return a callable for the delete preview method over gRPC. + + Deletes a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.DeletePreviewRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
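# Illustrative usage sketch, not part of the generated client code.
# base.py wraps each new Preview method with ``default_timeout=None`` and no
# default retry, so callers may pass both per call. A minimal sketch using
# google.api_core retry primitives; the resource name is hypothetical.

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import config_v1


def get_preview_with_retry() -> config_v1.Preview:
    client = config_v1.ConfigClient()

    custom_retry = retries.Retry(
        initial=1.0,  # seconds before the first retry
        maximum=10.0,  # cap on the delay between retries
        multiplier=2.0,
        timeout=120.0,  # give up after two minutes overall
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    )
    return client.get_preview(
        name="projects/my-project/locations/us-central1/previews/my-preview",
        retry=custom_retry,
        timeout=30.0,  # per-attempt RPC timeout
    )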
+ if "delete_preview" not in self._stubs: + self._stubs["delete_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/DeletePreview", + request_serializer=config.DeletePreviewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_preview"] + + @property + def export_preview_result( + self, + ) -> Callable[ + [config.ExportPreviewResultRequest], config.ExportPreviewResultResponse + ]: + r"""Return a callable for the export preview result method over gRPC. + + Export [Preview][google.cloud.config.v1.Preview] results. + + Returns: + Callable[[~.ExportPreviewResultRequest], + ~.ExportPreviewResultResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_preview_result" not in self._stubs: + self._stubs["export_preview_result"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportPreviewResult", + request_serializer=config.ExportPreviewResultRequest.serialize, + response_deserializer=config.ExportPreviewResultResponse.deserialize, + ) + return self._stubs["export_preview_result"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py index 84eaa9ba992c..c02df0a05a20 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py @@ -695,6 +695,140 @@ def export_lock_info( ) return self._stubs["export_lock_info"] + @property + def create_preview( + self, + ) -> Callable[[config.CreatePreviewRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create preview method over gRPC. + + Creates a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.CreatePreviewRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_preview" not in self._stubs: + self._stubs["create_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/CreatePreview", + request_serializer=config.CreatePreviewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_preview"] + + @property + def get_preview( + self, + ) -> Callable[[config.GetPreviewRequest], Awaitable[config.Preview]]: + r"""Return a callable for the get preview method over gRPC. + + Gets details about a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.GetPreviewRequest], + Awaitable[~.Preview]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_preview" not in self._stubs: + self._stubs["get_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/GetPreview", + request_serializer=config.GetPreviewRequest.serialize, + response_deserializer=config.Preview.deserialize, + ) + return self._stubs["get_preview"] + + @property + def list_previews( + self, + ) -> Callable[[config.ListPreviewsRequest], Awaitable[config.ListPreviewsResponse]]: + r"""Return a callable for the list previews method over gRPC. + + Lists [Preview][google.cloud.config.v1.Preview]s in a given + project and location. + + Returns: + Callable[[~.ListPreviewsRequest], + Awaitable[~.ListPreviewsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_previews" not in self._stubs: + self._stubs["list_previews"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ListPreviews", + request_serializer=config.ListPreviewsRequest.serialize, + response_deserializer=config.ListPreviewsResponse.deserialize, + ) + return self._stubs["list_previews"] + + @property + def delete_preview( + self, + ) -> Callable[[config.DeletePreviewRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete preview method over gRPC. + + Deletes a [Preview][google.cloud.config.v1.Preview]. + + Returns: + Callable[[~.DeletePreviewRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_preview" not in self._stubs: + self._stubs["delete_preview"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/DeletePreview", + request_serializer=config.DeletePreviewRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_preview"] + + @property + def export_preview_result( + self, + ) -> Callable[ + [config.ExportPreviewResultRequest], + Awaitable[config.ExportPreviewResultResponse], + ]: + r"""Return a callable for the export preview result method over gRPC. + + Export [Preview][google.cloud.config.v1.Preview] results. + + Returns: + Callable[[~.ExportPreviewResultRequest], + Awaitable[~.ExportPreviewResultResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_preview_result" not in self._stubs: + self._stubs["export_preview_result"] = self.grpc_channel.unary_unary( + "/google.cloud.config.v1.Config/ExportPreviewResult", + request_serializer=config.ExportPreviewResultRequest.serialize, + response_deserializer=config.ExportPreviewResultResponse.deserialize, + ) + return self._stubs["export_preview_result"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py index 830a3d73360d..321e415304ea 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py @@ -83,6 +83,14 @@ def post_create_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_create_preview(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_preview(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_deployment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -91,6 +99,14 @@ def post_delete_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_preview(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_preview(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_statefile(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -111,6 +127,14 @@ def post_export_lock_info(self, response): logging.log(f"Received response: {response}") return response + def pre_export_preview_result(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_preview_result(self, response): + logging.log(f"Received response: {response}") + return response + def pre_export_revision_statefile(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -127,6 +151,14 @@ def post_get_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_get_preview(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_preview(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_resource(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -159,6 +191,14 @@ def post_list_deployments(self, response): logging.log(f"Received response: {response}") return response + def pre_list_previews(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_previews(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_resources(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -228,6 +268,27 @@ def post_create_deployment( """ return response + def pre_create_preview( + self, request: config.CreatePreviewRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.CreatePreviewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_preview + + 
Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_create_preview( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_preview + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_delete_deployment( self, request: config.DeleteDeploymentRequest, @@ -251,6 +312,27 @@ def post_delete_deployment( """ return response + def pre_delete_preview( + self, request: config.DeletePreviewRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.DeletePreviewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_preview + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_delete_preview( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_preview + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_delete_statefile( self, request: config.DeleteStatefileRequest, @@ -305,6 +387,29 @@ def post_export_lock_info(self, response: config.LockInfo) -> config.LockInfo: """ return response + def pre_export_preview_result( + self, + request: config.ExportPreviewResultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[config.ExportPreviewResultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_preview_result + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_export_preview_result( + self, response: config.ExportPreviewResultResponse + ) -> config.ExportPreviewResultResponse: + """Post-rpc interceptor for export_preview_result + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_export_revision_statefile( self, request: config.ExportRevisionStatefileRequest, @@ -347,6 +452,25 @@ def post_get_deployment(self, response: config.Deployment) -> config.Deployment: """ return response + def pre_get_preview( + self, request: config.GetPreviewRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.GetPreviewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_preview + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_get_preview(self, response: config.Preview) -> config.Preview: + """Post-rpc interceptor for get_preview + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. 
+ """ + return response + def pre_get_resource( self, request: config.GetResourceRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[config.GetResourceRequest, Sequence[Tuple[str, str]]]: @@ -429,6 +553,27 @@ def post_list_deployments( """ return response + def pre_list_previews( + self, request: config.ListPreviewsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[config.ListPreviewsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_previews + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_list_previews( + self, response: config.ListPreviewsResponse + ) -> config.ListPreviewsResponse: + """Post-rpc interceptor for list_previews + + Override in a subclass to manipulate the response + after it is returned by the Config server but before + it is returned to user code. + """ + return response + def pre_list_resources( self, request: config.ListResourcesRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[config.ListResourcesRequest, Sequence[Tuple[str, str]]]: @@ -990,6 +1135,102 @@ def __call__( resp = self._interceptor.post_create_deployment(resp) return resp + class _CreatePreview(ConfigRestStub): + def __hash__(self): + return hash("CreatePreview") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.CreatePreviewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create preview method over HTTP. + + Args: + request (~.config.CreatePreviewRequest): + The request object. A request to create a preview. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/previews", + "body": "preview", + }, + ] + request, metadata = self._interceptor.pre_create_preview(request, metadata) + pb_request = config.CreatePreviewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_preview(resp) + return resp + class _DeleteDeployment(ConfigRestStub): def __hash__(self): return hash("DeleteDeployment") @@ -1037,10 +1278,97 @@ def __call__( "uri": "/v1/{name=projects/*/locations/*/deployments/*}", }, ] - request, metadata = self._interceptor.pre_delete_deployment( - request, metadata - ) - pb_request = config.DeleteDeploymentRequest.pb(request) + request, metadata = self._interceptor.pre_delete_deployment( + request, metadata + ) + pb_request = config.DeleteDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_deployment(resp) + return resp + + class _DeletePreview(ConfigRestStub): + def __hash__(self): + return hash("DeletePreview") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.DeletePreviewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete preview method over HTTP. + + Args: + request (~.config.DeletePreviewRequest): + The request object. A request to delete a preview. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/previews/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_preview(request, metadata) + pb_request = config.DeletePreviewRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -1076,7 +1404,7 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_deployment(resp) + resp = self._interceptor.post_delete_preview(resp) return resp class _DeleteStatefile(ConfigRestStub): @@ -1358,6 +1686,105 @@ def __call__( resp = self._interceptor.post_export_lock_info(resp) return resp + class _ExportPreviewResult(ConfigRestStub): + def __hash__(self): + return hash("ExportPreviewResult") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ExportPreviewResultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ExportPreviewResultResponse: + r"""Call the export preview result method over HTTP. + + Args: + request (~.config.ExportPreviewResultRequest): + The request object. A request to export preview results. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.ExportPreviewResultResponse: + A response to ``ExportPreviewResult`` call. Contains + preview results. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/previews/*}:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_preview_result( + request, metadata + ) + pb_request = config.ExportPreviewResultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.ExportPreviewResultResponse() + pb_resp = config.ExportPreviewResultResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_preview_result(resp) + return resp + class _ExportRevisionStatefile(ConfigRestStub): def __hash__(self): return hash("ExportRevisionStatefile") @@ -1548,6 +1975,97 @@ def __call__( resp = self._interceptor.post_get_deployment(resp) return resp + class _GetPreview(ConfigRestStub): + def __hash__(self): + return hash("GetPreview") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.GetPreviewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.Preview: + r"""Call the get preview method over HTTP. + + Args: + request (~.config.GetPreviewRequest): + The request object. A request to get details about a + preview. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.Preview: + A preview represents a set of actions + Infra Manager would perform to move the + resources towards the desired state as + specified in the configuration. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/previews/*}", + }, + ] + request, metadata = self._interceptor.pre_get_preview(request, metadata) + pb_request = config.GetPreviewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Preview() + pb_resp = config.Preview.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_preview(resp) + return resp + class _GetResource(ConfigRestStub): def __hash__(self): return hash("GetResource") @@ -1919,6 +2437,95 @@ def __call__( resp = self._interceptor.post_list_deployments(resp) return resp + class _ListPreviews(ConfigRestStub): + def __hash__(self): + return hash("ListPreviews") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: config.ListPreviewsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> config.ListPreviewsResponse: + r"""Call the list previews method over HTTP. + + Args: + request (~.config.ListPreviewsRequest): + The request object. A request to list all previews for a + given project and location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.config.ListPreviewsResponse: + A response to a ``ListPreviews`` call. Contains a list + of Previews. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/previews", + }, + ] + request, metadata = self._interceptor.pre_list_previews(request, metadata) + pb_request = config.ListPreviewsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.ListPreviewsResponse() + pb_resp = config.ListPreviewsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_previews(resp) + return resp + class _ListResources(ConfigRestStub): def __hash__(self): return hash("ListResources") @@ -2399,6 +3006,14 @@ def create_deployment( # In C++ this would require a dynamic_cast return self._CreateDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def create_preview( + self, + ) -> Callable[[config.CreatePreviewRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreatePreview(self._session, self._host, self._interceptor) # type: ignore + @property def delete_deployment( self, @@ -2407,6 +3022,14 @@ def delete_deployment( # In C++ this would require a dynamic_cast return self._DeleteDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_preview( + self, + ) -> Callable[[config.DeletePreviewRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePreview(self._session, self._host, self._interceptor) # type: ignore + @property def delete_statefile( self, @@ -2431,6 +3054,16 @@ def export_lock_info( # In C++ this would require a dynamic_cast return self._ExportLockInfo(self._session, self._host, self._interceptor) # type: ignore + @property + def export_preview_result( + self, + ) -> Callable[ + [config.ExportPreviewResultRequest], config.ExportPreviewResultResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExportPreviewResult(self._session, self._host, self._interceptor) # type: ignore + @property def export_revision_statefile( self, @@ -2447,6 +3080,12 @@ def get_deployment( # In C++ this would require a dynamic_cast return self._GetDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def get_preview(self) -> Callable[[config.GetPreviewRequest], config.Preview]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPreview(self._session, self._host, self._interceptor) # type: ignore + @property def get_resource(self) -> Callable[[config.GetResourceRequest], config.Resource]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -2475,6 +3114,14 @@ def list_deployments( # In C++ this would require a dynamic_cast return self._ListDeployments(self._session, self._host, self._interceptor) # type: ignore + @property + def list_previews( + self, + ) -> Callable[[config.ListPreviewsRequest], config.ListPreviewsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPreviews(self._session, self._host, self._interceptor) # type: ignore + @property def list_resources( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py index 5ce94d9c52f7..6c87a43cf63b 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py +++ b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py @@ -16,20 +16,27 @@ from .config import ( ApplyResults, CreateDeploymentRequest, + CreatePreviewRequest, DeleteDeploymentRequest, + DeletePreviewRequest, DeleteStatefileRequest, Deployment, DeploymentOperationMetadata, ExportDeploymentStatefileRequest, ExportLockInfoRequest, + ExportPreviewResultRequest, + ExportPreviewResultResponse, ExportRevisionStatefileRequest, GetDeploymentRequest, + GetPreviewRequest, GetResourceRequest, GetRevisionRequest, GitSource, ImportStatefileRequest, ListDeploymentsRequest, ListDeploymentsResponse, + ListPreviewsRequest, + ListPreviewsResponse, ListResourcesRequest, ListResourcesResponse, ListRevisionsRequest, @@ -37,6 +44,10 @@ LockDeploymentRequest, LockInfo, OperationMetadata, + Preview, + PreviewArtifacts, + PreviewOperationMetadata, + PreviewResult, Resource, ResourceCAIInfo, ResourceTerraformInfo, @@ -53,20 +64,27 @@ __all__ = ( "ApplyResults", "CreateDeploymentRequest", + "CreatePreviewRequest", "DeleteDeploymentRequest", + "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", "DeploymentOperationMetadata", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", + "ExportPreviewResultRequest", + "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", "GetDeploymentRequest", + "GetPreviewRequest", "GetResourceRequest", "GetRevisionRequest", "GitSource", "ImportStatefileRequest", "ListDeploymentsRequest", "ListDeploymentsResponse", + "ListPreviewsRequest", + "ListPreviewsResponse", "ListResourcesRequest", "ListResourcesResponse", "ListRevisionsRequest", @@ -74,6 +92,10 @@ "LockDeploymentRequest", "LockInfo", "OperationMetadata", + "Preview", + "PreviewArtifacts", + "PreviewOperationMetadata", + "PreviewResult", "Resource", "ResourceCAIInfo", "ResourceTerraformInfo", 
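Reviewer note (not part of the generated diff): below is a minimal, illustrative sketch of how the Preview surface added in this change fits together, using only the request types and client methods introduced above. Values such as "my-project", "us-central1", and "gs://my-bucket/blueprint" are placeholders, not names from this change.

from google.cloud import config_v1


def preview_walkthrough() -> None:
    # Assumes application-default credentials; project/region values are placeholders.
    client = config_v1.ConfigClient()
    parent = "projects/my-project/locations/us-central1"

    # CreatePreview is a long-running operation; result() yields the created Preview.
    preview = config_v1.Preview()
    preview.terraform_blueprint.gcs_source = "gs://my-bucket/blueprint"
    operation = client.create_preview(
        request=config_v1.CreatePreviewRequest(parent=parent, preview=preview)
    )
    created = operation.result()

    # ListPreviews is paginated; the returned pager follows next_page_token automatically.
    for p in client.list_previews(
        request=config_v1.ListPreviewsRequest(parent=parent)
    ):
        print(p.name, p.state)

    # ExportPreviewResult returns signed URLs for the generated plan files.
    exported = client.export_preview_result(
        request=config_v1.ExportPreviewResultRequest(parent=created.name)
    )
    print(exported.result.binary_signed_uri, exported.result.json_signed_uri)

The pager used above mirrors the ListPreviewsPager/ListPreviewsAsyncPager added earlier in this change, and the gRPC and REST transports wire the same five RPCs (CreatePreview, GetPreview, ListPreviews, DeletePreview, ExportPreviewResult).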
diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/config.py b/packages/google-cloud-config/google/cloud/config_v1/types/config.py index 91145d025753..589c58143948 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/types/config.py +++ b/packages/google-cloud-config/google/cloud/config_v1/types/config.py @@ -60,6 +60,17 @@ "UnlockDeploymentRequest", "ExportLockInfoRequest", "LockInfo", + "Preview", + "PreviewOperationMetadata", + "PreviewArtifacts", + "CreatePreviewRequest", + "GetPreviewRequest", + "ListPreviewsRequest", + "ListPreviewsResponse", + "DeletePreviewRequest", + "ExportPreviewResultRequest", + "ExportPreviewResultResponse", + "PreviewResult", }, ) @@ -889,6 +900,11 @@ class DeletePolicy(proto.Enum): class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -896,6 +912,11 @@ class OperationMetadata(proto.Message): Output only. Metadata about the deployment operation state. + This field is a member of `oneof`_ ``resource_metadata``. + preview_metadata (google.cloud.config_v1.types.PreviewOperationMetadata): + Output only. Metadata about the preview + operation state. + This field is a member of `oneof`_ ``resource_metadata``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -929,6 +950,12 @@ class OperationMetadata(proto.Message): oneof="resource_metadata", message="DeploymentOperationMetadata", ) + preview_metadata: "PreviewOperationMetadata" = proto.Field( + proto.MESSAGE, + number=9, + oneof="resource_metadata", + message="PreviewOperationMetadata", + ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, @@ -1799,4 +1826,622 @@ class LockInfo(proto.Message): ) +class Preview(proto.Message): + r"""A preview represents a set of actions Infra Manager would + perform to move the resources towards the desired state as + specified in the configuration. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + terraform_blueprint (google.cloud.config_v1.types.TerraformBlueprint): + The terraform blueprint to preview. + + This field is a member of `oneof`_ ``blueprint``. + name (str): + Identifier. Resource name of the preview. Resource name can + be user provided or server generated ID if unspecified. + Format: + ``projects/{project}/locations/{location}/previews/{preview}`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time the preview was created. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + preview. + state (google.cloud.config_v1.types.Preview.State): + Output only. Current state of the preview. + deployment (str): + Optional. Optional deployment reference. If + specified, the preview will be performed using + the provided deployment's current state and use + any relevant fields from the deployment unless + explicitly specified in the preview create + request. + preview_mode (google.cloud.config_v1.types.Preview.PreviewMode): + Optional. Current mode of preview. + service_account (str): + Optional. Optional service account. 
If + omitted, the deployment resource reference must + be provided, and the service account attached to + the deployment will be used. + artifacts_gcs_bucket (str): + Optional. User-defined location of Cloud Build logs and + artifacts in Google Cloud Storage. Format: + ``gs://{bucket}/{folder}`` A default bucket will be + bootstrapped if the field is not set or empty. Default Bucket + Format: ``gs://<project number>-<region>-blueprint-config`` + Constraints: + + - The bucket needs to be in the same project as the + deployment + - The path cannot be within the path of ``gcs_source``. If + omitted and deployment resource ref provided has + artifacts_gcs_bucket defined, that artifact bucket is + used. + + This field is a member of `oneof`_ ``_artifacts_gcs_bucket``. + worker_pool (str): + Optional. The user-specified Worker Pool resource in which + the Cloud Build job will execute. Format: + projects/{project}/locations/{location}/workerPools/{workerPoolId} + If this field is unspecified, the default Cloud Build worker + pool will be used. If omitted and deployment resource ref + provided has worker_pool defined, that worker pool is used. + + This field is a member of `oneof`_ ``_worker_pool``. + error_code (google.cloud.config_v1.types.Preview.ErrorCode): + Output only. Code describing any errors that + may have occurred. + error_status (google.rpc.status_pb2.Status): + Output only. Additional information regarding + the current state. + build (str): + Output only. Cloud Build instance UUID + associated with this preview. + tf_errors (MutableSequence[google.cloud.config_v1.types.TerraformError]): + Output only. Summary of errors encountered + during Terraform preview. It has a size limit of + 10, i.e. only the top 10 errors will be summarized + here. + error_logs (str): + Output only. Link to tf-error.ndjson file, which contains + the full list of the errors encountered during a Terraform + preview. Format: ``gs://{bucket}/{object}``. + preview_artifacts (google.cloud.config_v1.types.PreviewArtifacts): + Output only. Artifacts from preview. + logs (str): + Output only. Location of preview logs in + ``gs://{bucket}/{object}`` format. + """ + + class State(proto.Enum): + r"""Possible states of a preview. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is unknown. + CREATING (1): + The preview is being created. + SUCCEEDED (2): + The preview has succeeded. + APPLYING (3): + The preview is being applied. + STALE (4): + The preview is stale. A preview can become + stale if a revision has been applied after this + preview was created. + DELETING (5): + The preview is being deleted. + FAILED (6): + The preview has encountered an unexpected + error. + DELETED (7): + The preview has been deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + SUCCEEDED = 2 + APPLYING = 3 + STALE = 4 + DELETING = 5 + FAILED = 6 + DELETED = 7 + + class PreviewMode(proto.Enum): + r"""Preview mode provides options for customizing preview + operations. + + Values: + PREVIEW_MODE_UNSPECIFIED (0): + Unspecified policy, default mode will be + used. + DEFAULT (1): + DEFAULT mode generates an execution plan for + reconciling current resource state into expected + resource state. + DELETE (2): + DELETE mode generates an execution plan for + destroying current resources. + """ + PREVIEW_MODE_UNSPECIFIED = 0 + DEFAULT = 1 + DELETE = 2 + + class ErrorCode(proto.Enum): + r"""Possible errors that can occur with previews. + + Values: + ERROR_CODE_UNSPECIFIED (0): + No error code was specified.
+ CLOUD_BUILD_PERMISSION_DENIED (1): + Cloud Build failed due to a permissions + issue. + BUCKET_CREATION_PERMISSION_DENIED (2): + Cloud Storage bucket failed to create due to + a permissions issue. + BUCKET_CREATION_FAILED (3): + Cloud Storage bucket failed for a + non-permissions-related issue. + DEPLOYMENT_LOCK_ACQUIRE_FAILED (4): + Acquiring lock on provided deployment + reference failed. + PREVIEW_BUILD_API_FAILED (5): + Preview encountered an error when trying to + access Cloud Build API. + PREVIEW_BUILD_RUN_FAILED (6): + Preview created a build but build failed and + logs were generated. + """ + ERROR_CODE_UNSPECIFIED = 0 + CLOUD_BUILD_PERMISSION_DENIED = 1 + BUCKET_CREATION_PERMISSION_DENIED = 2 + BUCKET_CREATION_FAILED = 3 + DEPLOYMENT_LOCK_ACQUIRE_FAILED = 4 + PREVIEW_BUILD_API_FAILED = 5 + PREVIEW_BUILD_RUN_FAILED = 6 + + terraform_blueprint: "TerraformBlueprint" = proto.Field( + proto.MESSAGE, + number=6, + oneof="blueprint", + message="TerraformBlueprint", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + deployment: str = proto.Field( + proto.STRING, + number=5, + ) + preview_mode: PreviewMode = proto.Field( + proto.ENUM, + number=15, + enum=PreviewMode, + ) + service_account: str = proto.Field( + proto.STRING, + number=7, + ) + artifacts_gcs_bucket: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + error_code: ErrorCode = proto.Field( + proto.ENUM, + number=10, + enum=ErrorCode, + ) + error_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=11, + message=status_pb2.Status, + ) + build: str = proto.Field( + proto.STRING, + number=12, + ) + tf_errors: MutableSequence["TerraformError"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="TerraformError", + ) + error_logs: str = proto.Field( + proto.STRING, + number=14, + ) + preview_artifacts: "PreviewArtifacts" = proto.Field( + proto.MESSAGE, + number=16, + message="PreviewArtifacts", + ) + logs: str = proto.Field( + proto.STRING, + number=17, + ) + + +class PreviewOperationMetadata(proto.Message): + r"""Ephemeral metadata content describing the state of a preview + operation. + + Attributes: + step (google.cloud.config_v1.types.PreviewOperationMetadata.PreviewStep): + The current step the preview operation is + running. + preview_artifacts (google.cloud.config_v1.types.PreviewArtifacts): + Artifacts from preview. + logs (str): + Output only. Location of preview logs in + ``gs://{bucket}/{object}`` format. + build (str): + Output only. Cloud Build instance UUID + associated with this preview. + """ + + class PreviewStep(proto.Enum): + r"""The possible steps a preview may be running. + + Values: + PREVIEW_STEP_UNSPECIFIED (0): + Unspecified preview step. + PREPARING_STORAGE_BUCKET (1): + Infra Manager is creating a Google Cloud + Storage bucket to store artifacts and metadata + about the preview. + DOWNLOADING_BLUEPRINT (2): + Downloading the blueprint onto the Google + Cloud Storage bucket. + RUNNING_TF_INIT (3): + Initializing Terraform using ``terraform init``. + RUNNING_TF_PLAN (4): + Running ``terraform plan``. + FETCHING_DEPLOYMENT (5): + Fetching a deployment. 
+ LOCKING_DEPLOYMENT (6): + Locking a deployment. + UNLOCKING_DEPLOYMENT (7): + Unlocking a deployment. + SUCCEEDED (8): + Operation was successful. + FAILED (9): + Operation failed. + """ + PREVIEW_STEP_UNSPECIFIED = 0 + PREPARING_STORAGE_BUCKET = 1 + DOWNLOADING_BLUEPRINT = 2 + RUNNING_TF_INIT = 3 + RUNNING_TF_PLAN = 4 + FETCHING_DEPLOYMENT = 5 + LOCKING_DEPLOYMENT = 6 + UNLOCKING_DEPLOYMENT = 7 + SUCCEEDED = 8 + FAILED = 9 + + step: PreviewStep = proto.Field( + proto.ENUM, + number=1, + enum=PreviewStep, + ) + preview_artifacts: "PreviewArtifacts" = proto.Field( + proto.MESSAGE, + number=2, + message="PreviewArtifacts", + ) + logs: str = proto.Field( + proto.STRING, + number=3, + ) + build: str = proto.Field( + proto.STRING, + number=4, + ) + + +class PreviewArtifacts(proto.Message): + r"""Artifacts created by preview. + + Attributes: + content (str): + Output only. Location of a blueprint copy and other content + in Google Cloud Storage. Format: ``gs://{bucket}/{object}`` + artifacts (str): + Output only. Location of artifacts in Google Cloud Storage. + Format: ``gs://{bucket}/{object}`` + """ + + content: str = proto.Field( + proto.STRING, + number=1, + ) + artifacts: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreatePreviewRequest(proto.Message): + r"""A request to create a preview. + + Attributes: + parent (str): + Required. The parent in whose context the Preview is + created. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + preview_id (str): + Optional. The preview ID. + preview (google.cloud.config_v1.types.Preview): + Required. [Preview][google.cloud.config.v1.Preview] resource + to be created. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + preview_id: str = proto.Field( + proto.STRING, + number=2, + ) + preview: "Preview" = proto.Field( + proto.MESSAGE, + number=3, + message="Preview", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GetPreviewRequest(proto.Message): + r"""A request to get details about a preview. + + Attributes: + name (str): + Required. The name of the preview. Format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListPreviewsRequest(proto.Message): + r"""A request to list all previews for a given project and + location. + + Attributes: + parent (str): + Required. The parent in whose context the Previews are + listed. The parent value is in the format: + 'projects/{project_id}/locations/{location}'. + page_size (int): + Optional. When requesting a page of resources, 'page_size' + specifies number of resources to return. 
If unspecified or + set to 0, all resources will be returned. + page_token (str): + Optional. Token returned by previous call to + 'ListPreviews' which specifies the position + in the list from where to continue listing the + resources. + filter (str): + Optional. Lists the Previews that match the filter + expression. A filter expression filters the resources listed + in the response. The expression must be of the form '{field} + {operator} {value}' where operators: '<', '>', '<=', '>=', + '!=', '=', ':' are supported (colon ':' represents a HAS + operator which is roughly synonymous with equality). {field} + can refer to a proto or JSON field, or a synthetic field. + Field names can be camelCase or snake_case. + + Examples: + + - Filter by name: name = + "projects/foo/locations/us-central1/previews/bar" + + - Filter by labels: + + - Resources that have a key called 'foo' labels.foo:\* + - Resources that have a key called 'foo' whose value is + 'bar' labels.foo = bar + + - Filter by state: + + - Previews in CREATING state. state=CREATING + order_by (str): + Optional. Field to use to sort the list. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListPreviewsResponse(proto.Message): + r"""A response to a ``ListPreviews`` call. Contains a list of Previews. + + Attributes: + previews (MutableSequence[google.cloud.config_v1.types.Preview]): + List of [Preview][google.cloud.config.v1.Preview]s. + next_page_token (str): + Token to be supplied to the next ListPreviews request via + ``page_token`` to obtain the next set of results. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + previews: MutableSequence["Preview"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Preview", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeletePreviewRequest(proto.Message): + r"""A request to delete a preview. + + Attributes: + name (str): + Required. The name of the Preview in the format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ExportPreviewResultRequest(proto.Message): + r"""A request to export preview results. + + Attributes: + parent (str): + Required.
The preview whose results should be exported. The + preview value is in the format: + 'projects/{project_id}/locations/{location}/previews/{preview}'. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExportPreviewResultResponse(proto.Message): + r"""A response to ``ExportPreviewResult`` call. Contains preview + results. + + Attributes: + result (google.cloud.config_v1.types.PreviewResult): + Output only. Signed URLs for accessing the + plan files. + """ + + result: "PreviewResult" = proto.Field( + proto.MESSAGE, + number=1, + message="PreviewResult", + ) + + +class PreviewResult(proto.Message): + r"""Contains signed Cloud Storage URLs. + + Attributes: + binary_signed_uri (str): + Output only. Plan binary signed URL. + json_signed_uri (str): + Output only. Plan JSON signed URL. + """ + + binary_signed_uri: str = proto.Field( + proto.STRING, + number=1, + ) + json_signed_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_async.py new file mode 100644 index 000000000000..16fcc030958b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_CreatePreview_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_create_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + preview = config_v1.Preview() + preview.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreatePreviewRequest( + parent="parent_value", + preview=preview, + ) + + # Make the request + operation = client.create_preview(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_CreatePreview_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_sync.py new file mode 100644 index 000000000000..db581ef7d92b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_preview_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_CreatePreview_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_create_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + preview = config_v1.Preview() + preview.terraform_blueprint.gcs_source = "gcs_source_value" + + request = config_v1.CreatePreviewRequest( + parent="parent_value", + preview=preview, + ) + + # Make the request + operation = client.create_preview(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_CreatePreview_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_async.py new file mode 100644 index 000000000000..e0582077006b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeletePreview_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
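The generated CreatePreview snippets above always build a full `CreatePreviewRequest`. As a quick illustration outside the diff, the flattened-argument form exercised by the new `test_create_preview_flattened` tests later in this change can also be used. This is a minimal sketch, not a generated sample: the project/location and GCS blueprint path are placeholders, and it assumes (as is standard for these long-running operations) that `operation.result()` resolves to the created `Preview`.

```python
from google.cloud import config_v1


def create_preview_and_wait(parent: str, gcs_source: str):
    """Create a Preview via flattened arguments and block until the LRO finishes."""
    client = config_v1.ConfigClient()

    preview = config_v1.Preview()
    preview.terraform_blueprint.gcs_source = gcs_source

    # Flattened form: pass parent and preview directly instead of a CreatePreviewRequest.
    operation = client.create_preview(parent=parent, preview=preview)

    # result() blocks on the long-running operation; the timeout (seconds) is optional.
    return operation.result(timeout=900)


if __name__ == "__main__":
    created = create_preview_and_wait(
        parent="projects/my-project/locations/us-central1",  # placeholder resource name
        gcs_source="gs://my-bucket/tf-blueprint",             # placeholder blueprint path
    )
    print(created)
```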
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_delete_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeletePreviewRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_preview(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_DeletePreview_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_sync.py new file mode 100644 index 000000000000..4b79b21e5600 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_preview_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeletePreview_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_delete_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeletePreviewRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_preview(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END config_v1_generated_Config_DeletePreview_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_async.py new file mode 100644 index 000000000000..4ed8c1b5b82b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportPreviewResult +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportPreviewResult_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_export_preview_result(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ExportPreviewResultRequest( + parent="parent_value", + ) + + # Make the request + response = await client.export_preview_result(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportPreviewResult_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_sync.py new file mode 100644 index 000000000000..e5b314c834fa --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_export_preview_result_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportPreviewResult +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ExportPreviewResult_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_export_preview_result(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ExportPreviewResultRequest( + parent="parent_value", + ) + + # Make the request + response = client.export_preview_result(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_ExportPreviewResult_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_async.py new file mode 100644 index 000000000000..e27ee6bbb74c --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
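The ExportPreviewResult samples above stop at printing the response, but the `PreviewResult` message only carries signed Cloud Storage URLs, so a natural follow-up is to fetch the plan content itself. A minimal sketch, not part of the diff: it assumes the signed URL can be fetched with a plain unauthenticated HTTP GET (the usual contract for signed URLs), and the preview resource name is a placeholder.

```python
import json
import urllib.request

from google.cloud import config_v1


def download_preview_plan_json(preview_name: str) -> dict:
    """Export a preview's result and fetch the plan JSON its signed URL points at."""
    client = config_v1.ConfigClient()

    response = client.export_preview_result(
        request=config_v1.ExportPreviewResultRequest(parent=preview_name)
    )

    # ExportPreviewResultResponse.result is a PreviewResult with two signed URLs:
    # binary_signed_uri (plan binary) and json_signed_uri (plan JSON).
    with urllib.request.urlopen(response.result.json_signed_uri) as resp:
        return json.loads(resp.read().decode("utf-8"))


if __name__ == "__main__":
    plan = download_preview_plan_json(
        "projects/my-project/locations/us-central1/previews/my-preview"  # placeholder
    )
    print("top-level plan keys:", sorted(plan))
```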
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetPreview_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_get_preview(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetPreviewRequest( + name="name_value", + ) + + # Make the request + response = await client.get_preview(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetPreview_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_sync.py new file mode 100644 index 000000000000..746f326e3a5b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_preview_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPreview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetPreview_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_get_preview(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetPreviewRequest( + name="name_value", + ) + + # Make the request + response = client.get_preview(request=request) + + # Handle the response + print(response) + +# [END config_v1_generated_Config_GetPreview_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_async.py new file mode 100644 index 000000000000..76176272ca92 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPreviews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListPreviews_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_list_previews(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListPreviewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_previews(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListPreviews_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_sync.py new file mode 100644 index 000000000000..b781dfa7f067 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_previews_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
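Besides `parent`, the `ListPreviewsRequest` keyword map added to the fixup script later in this diff shows that the request also takes `page_size`, `page_token`, `filter`, and `order_by`. A sketch of using them, not a generated sample: the parent is a placeholder, and the `filter` and `order_by` expressions are assumptions about the service's filter grammar rather than values taken from this change.

```python
from google.cloud import config_v1


def list_recent_previews(parent: str) -> None:
    client = config_v1.ConfigClient()

    request = config_v1.ListPreviewsRequest(
        parent=parent,                # "projects/{project}/locations/{location}"
        page_size=25,                 # server-side page size; the pager hides paging
        order_by="create_time desc",  # assumed ordering expression
        filter='state = "CREATING"',  # assumed filter grammar; adjust to the service
    )

    # The returned pager lazily fetches additional pages as it is iterated.
    for preview in client.list_previews(request=request):
        print(preview.name, preview.state)


if __name__ == "__main__":
    list_recent_previews("projects/my-project/locations/us-central1")  # placeholder
```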
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPreviews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListPreviews_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_list_previews(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListPreviewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_previews(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END config_v1_generated_Config_ListPreviews_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json index 1dc05379a40f..9f9d26c92521 100644 --- a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-config", - "version": "0.1.2" + "version": "0.1.0" }, "snippets": [ { @@ -188,6 +188,175 @@ ], "title": "config_v1_generated_config_create_deployment_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.create_preview", + "method": { + "fullName": "google.cloud.config.v1.Config.CreatePreview", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "CreatePreview" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.CreatePreviewRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "preview", + "type": "google.cloud.config_v1.types.Preview" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_preview" + }, + "description": "Sample for CreatePreview", + "file": "config_v1_generated_config_create_preview_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_CreatePreview_async", + "segments": 
[ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_create_preview_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.create_preview", + "method": { + "fullName": "google.cloud.config.v1.Config.CreatePreview", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "CreatePreview" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.CreatePreviewRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "preview", + "type": "google.cloud.config_v1.types.Preview" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_preview" + }, + "description": "Sample for CreatePreview", + "file": "config_v1_generated_config_create_preview_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_CreatePreview_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_create_preview_sync.py" + }, { "canonical": true, "clientMethod": { @@ -357,19 +526,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_preview", "method": { - "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "fullName": "google.cloud.config.v1.Config.DeletePreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeleteStatefile" + "shortName": "DeletePreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + "type": "google.cloud.config_v1.types.DeletePreviewRequest" }, { "name": "name", @@ -388,21 +557,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_statefile" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_preview" }, - "description": "Sample for DeleteStatefile", - "file": "config_v1_generated_config_delete_statefile_async.py", + "description": "Sample for DeletePreview", + "file": "config_v1_generated_config_delete_preview_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeleteStatefile_async", + "regionTag": "config_v1_generated_Config_DeletePreview_async", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + 
"end": 55, "start": 27, "type": "SHORT" }, @@ -412,20 +582,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_statefile_async.py" + "title": "config_v1_generated_config_delete_preview_async.py" }, { "canonical": true, @@ -434,19 +606,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.delete_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.delete_preview", "method": { - "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "fullName": "google.cloud.config.v1.Config.DeletePreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeleteStatefile" + "shortName": "DeletePreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + "type": "google.cloud.config_v1.types.DeletePreviewRequest" }, { "name": "name", @@ -465,21 +637,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_statefile" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_preview" }, - "description": "Sample for DeleteStatefile", - "file": "config_v1_generated_config_delete_statefile_sync.py", + "description": "Sample for DeletePreview", + "file": "config_v1_generated_config_delete_preview_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeleteStatefile_sync", + "regionTag": "config_v1_generated_Config_DeletePreview_sync", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -489,20 +662,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_statefile_sync.py" + "title": "config_v1_generated_config_delete_preview_sync.py" }, { "canonical": true, @@ -512,19 +687,23 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_deployment_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_statefile", "method": { - "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "fullName": "google.cloud.config.v1.Config.DeleteStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportDeploymentStatefile" + "shortName": "DeleteStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -539,22 +718,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_deployment_statefile" + "shortName": "delete_statefile" }, - "description": "Sample for ExportDeploymentStatefile", - "file": "config_v1_generated_config_export_deployment_statefile_async.py", + "description": "Sample for 
DeleteStatefile", + "file": "config_v1_generated_config_delete_statefile_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_async", + "regionTag": "config_v1_generated_Config_DeleteStatefile_async", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -564,22 +742,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 51, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_deployment_statefile_async.py" + "title": "config_v1_generated_config_delete_statefile_async.py" }, { "canonical": true, @@ -588,19 +764,23 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_deployment_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.delete_statefile", "method": { - "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "fullName": "google.cloud.config.v1.Config.DeleteStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportDeploymentStatefile" + "shortName": "DeleteStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -615,22 +795,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_deployment_statefile" + "shortName": "delete_statefile" }, - "description": "Sample for ExportDeploymentStatefile", - "file": "config_v1_generated_config_export_deployment_statefile_sync.py", + "description": "Sample for DeleteStatefile", + "file": "config_v1_generated_config_delete_statefile_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_sync", + "regionTag": "config_v1_generated_Config_DeleteStatefile_sync", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -640,18 +819,169 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_statefile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_deployment_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportDeploymentStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", 
+ "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_deployment_statefile" + }, + "description": "Sample for ExportDeploymentStatefile", + "file": "config_v1_generated_config_export_deployment_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_deployment_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_deployment_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportDeploymentStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_deployment_statefile" + }, + "description": "Sample for ExportDeploymentStatefile", + "file": "config_v1_generated_config_export_deployment_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -667,21 +997,331 @@ }, "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_lock_info", "method": { - "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportLockInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.LockInfo", + "shortName": "export_lock_info" + }, + "description": "Sample for ExportLockInfo", + "file": "config_v1_generated_config_export_lock_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportLockInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + 
"end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_lock_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_lock_info", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportLockInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.LockInfo", + "shortName": "export_lock_info" + }, + "description": "Sample for ExportLockInfo", + "file": "config_v1_generated_config_export_lock_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportLockInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_lock_info_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_preview_result", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportPreviewResult", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportPreviewResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportPreviewResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.ExportPreviewResultResponse", + "shortName": "export_preview_result" + }, + "description": "Sample for ExportPreviewResult", + "file": "config_v1_generated_config_export_preview_result_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportPreviewResult_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"config_v1_generated_config_export_preview_result_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_preview_result", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportPreviewResult", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportPreviewResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportPreviewResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.types.ExportPreviewResultResponse", + "shortName": "export_preview_result" + }, + "description": "Sample for ExportPreviewResult", + "file": "config_v1_generated_config_export_preview_result_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportPreviewResult_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_preview_result_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_revision_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportLockInfo" + "shortName": "ExportRevisionStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportLockInfoRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" }, { "name": "retry", @@ -696,14 +1336,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.LockInfo", - "shortName": "export_lock_info" + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_revision_statefile" }, - "description": "Sample for ExportLockInfo", - "file": "config_v1_generated_config_export_lock_info_async.py", + "description": "Sample for ExportRevisionStatefile", + "file": "config_v1_generated_config_export_revision_statefile_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportLockInfo_async", + "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_async", "segments": [ { "end": 51, @@ -736,7 +1376,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_lock_info_async.py" + "title": "config_v1_generated_config_export_revision_statefile_async.py" }, { "canonical": true, @@ -745,23 +1385,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_lock_info", + "fullName": 
"google.cloud.config_v1.ConfigClient.export_revision_statefile", "method": { - "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportLockInfo" + "shortName": "ExportRevisionStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportLockInfoRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" }, { "name": "retry", @@ -776,14 +1412,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.LockInfo", - "shortName": "export_lock_info" + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_revision_statefile" }, - "description": "Sample for ExportLockInfo", - "file": "config_v1_generated_config_export_lock_info_sync.py", + "description": "Sample for ExportRevisionStatefile", + "file": "config_v1_generated_config_export_revision_statefile_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportLockInfo_sync", + "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_sync", "segments": [ { "end": 51, @@ -816,7 +1452,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_lock_info_sync.py" + "title": "config_v1_generated_config_export_revision_statefile_sync.py" }, { "canonical": true, @@ -826,19 +1462,23 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_revision_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "fullName": "google.cloud.config.v1.Config.GetDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportRevisionStatefile" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + "type": "google.cloud.config_v1.types.GetDeploymentRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -853,14 +1493,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_revision_statefile" + "resultType": "google.cloud.config_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample for ExportRevisionStatefile", - "file": "config_v1_generated_config_export_revision_statefile_async.py", + "description": "Sample for GetDeployment", + "file": "config_v1_generated_config_get_deployment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_async", + "regionTag": "config_v1_generated_Config_GetDeployment_async", "segments": [ { "end": 51, @@ -893,7 +1533,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_revision_statefile_async.py" + "title": "config_v1_generated_config_get_deployment_async.py" }, { "canonical": true, @@ -902,19 +1542,23 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_revision_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.get_deployment", "method": { - 
"fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "fullName": "google.cloud.config.v1.Config.GetDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportRevisionStatefile" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + "type": "google.cloud.config_v1.types.GetDeploymentRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -929,14 +1573,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_revision_statefile" + "resultType": "google.cloud.config_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample for ExportRevisionStatefile", - "file": "config_v1_generated_config_export_revision_statefile_sync.py", + "description": "Sample for GetDeployment", + "file": "config_v1_generated_config_get_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_sync", + "regionTag": "config_v1_generated_Config_GetDeployment_sync", "segments": [ { "end": 51, @@ -969,7 +1613,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_revision_statefile_sync.py" + "title": "config_v1_generated_config_get_deployment_sync.py" }, { "canonical": true, @@ -979,19 +1623,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_preview", "method": { - "fullName": "google.cloud.config.v1.Config.GetDeployment", + "fullName": "google.cloud.config.v1.Config.GetPreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetDeployment" + "shortName": "GetPreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetDeploymentRequest" + "type": "google.cloud.config_v1.types.GetPreviewRequest" }, { "name": "name", @@ -1010,14 +1654,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Deployment", - "shortName": "get_deployment" + "resultType": "google.cloud.config_v1.types.Preview", + "shortName": "get_preview" }, - "description": "Sample for GetDeployment", - "file": "config_v1_generated_config_get_deployment_async.py", + "description": "Sample for GetPreview", + "file": "config_v1_generated_config_get_preview_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetDeployment_async", + "regionTag": "config_v1_generated_Config_GetPreview_async", "segments": [ { "end": 51, @@ -1050,7 +1694,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_deployment_async.py" + "title": "config_v1_generated_config_get_preview_async.py" }, { "canonical": true, @@ -1059,19 +1703,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_deployment", + "fullName": "google.cloud.config_v1.ConfigClient.get_preview", "method": { - "fullName": "google.cloud.config.v1.Config.GetDeployment", + "fullName": "google.cloud.config.v1.Config.GetPreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetDeployment" + "shortName": "GetPreview" }, "parameters": 
[ { "name": "request", - "type": "google.cloud.config_v1.types.GetDeploymentRequest" + "type": "google.cloud.config_v1.types.GetPreviewRequest" }, { "name": "name", @@ -1090,14 +1734,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.config_v1.types.Deployment", - "shortName": "get_deployment" + "resultType": "google.cloud.config_v1.types.Preview", + "shortName": "get_preview" }, - "description": "Sample for GetDeployment", - "file": "config_v1_generated_config_get_deployment_sync.py", + "description": "Sample for GetPreview", + "file": "config_v1_generated_config_get_preview_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetDeployment_sync", + "regionTag": "config_v1_generated_Config_GetPreview_sync", "segments": [ { "end": 51, @@ -1130,7 +1774,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_deployment_sync.py" + "title": "config_v1_generated_config_get_preview_sync.py" }, { "canonical": true, @@ -1784,6 +2428,167 @@ ], "title": "config_v1_generated_config_list_deployments_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_previews", + "method": { + "fullName": "google.cloud.config.v1.Config.ListPreviews", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListPreviews" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListPreviewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListPreviewsAsyncPager", + "shortName": "list_previews" + }, + "description": "Sample for ListPreviews", + "file": "config_v1_generated_config_list_previews_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListPreviews_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_previews_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.list_previews", + "method": { + "fullName": "google.cloud.config.v1.Config.ListPreviews", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListPreviews" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListPreviewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.config_v1.services.config.pagers.ListPreviewsPager", + "shortName": "list_previews" + }, + "description": "Sample for ListPreviews", + "file": "config_v1_generated_config_list_previews_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListPreviews_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_previews_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py b/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py index 8d7b0d1d739f..c9978014e855 100644 --- a/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py +++ b/packages/google-cloud-config/scripts/fixup_config_v1_keywords.py @@ -40,16 +40,21 @@ class configCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_deployment': ('parent', 'deployment_id', 'deployment', 'request_id', ), + 'create_preview': ('parent', 'preview', 'preview_id', 'request_id', ), 'delete_deployment': ('name', 'request_id', 'force', 'delete_policy', ), + 'delete_preview': ('name', 'request_id', ), 'delete_statefile': ('name', 'lock_id', ), 'export_deployment_statefile': ('parent', 'draft', ), 'export_lock_info': ('name', ), + 'export_preview_result': ('parent', ), 'export_revision_statefile': ('parent', ), 'get_deployment': ('name', ), + 'get_preview': ('name', ), 'get_resource': ('name', ), 'get_revision': ('name', ), 'import_statefile': ('parent', 'lock_id', 'skip_draft', ), 'list_deployments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_previews': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_resources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_revisions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'lock_deployment': ('name', ), diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index 635b310aed98..e4be71f0f933 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -5024,6 +5024,1336 @@ async def test_export_lock_info_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + config.CreatePreviewRequest, + dict, + ], +) +def test_create_preview(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreatePreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_preview_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + client.create_preview() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreatePreviewRequest() + + +@pytest.mark.asyncio +async def test_create_preview_async( + transport: str = "grpc_asyncio", request_type=config.CreatePreviewRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreatePreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_preview_async_from_dict(): + await test_create_preview_async(request_type=dict) + + +def test_create_preview_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.CreatePreviewRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_preview_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.CreatePreviewRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_preview_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_preview( + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].preview + mock_val = config.Preview( + terraform_blueprint=config.TerraformBlueprint(gcs_source="gcs_source_value") + ) + assert arg == mock_val + + +def test_create_preview_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_preview( + config.CreatePreviewRequest(), + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + + +@pytest.mark.asyncio +async def test_create_preview_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_preview( + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].preview + mock_val = config.Preview( + terraform_blueprint=config.TerraformBlueprint(gcs_source="gcs_source_value") + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_preview_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_preview( + config.CreatePreviewRequest(), + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetPreviewRequest, + dict, + ], +) +def test_get_preview(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Preview( + name="name_value", + state=config.Preview.State.CREATING, + deployment="deployment_value", + preview_mode=config.Preview.PreviewMode.DEFAULT, + service_account="service_account_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + worker_pool="worker_pool_value", + error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + error_logs="error_logs_value", + logs="logs_value", + ) + response = client.get_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetPreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Preview) + assert response.name == "name_value" + assert response.state == config.Preview.State.CREATING + assert response.deployment == "deployment_value" + assert response.preview_mode == config.Preview.PreviewMode.DEFAULT + assert response.service_account == "service_account_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.worker_pool == "worker_pool_value" + assert response.error_code == config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + assert response.build == "build_value" + assert response.error_logs == "error_logs_value" + assert response.logs == "logs_value" + + +def test_get_preview_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
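+ # With no request object and no flattened fields, the client should still
+ # build a default GetPreviewRequest and invoke the stub.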
+ with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + client.get_preview() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetPreviewRequest() + + +@pytest.mark.asyncio +async def test_get_preview_async( + transport: str = "grpc_asyncio", request_type=config.GetPreviewRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Preview( + name="name_value", + state=config.Preview.State.CREATING, + deployment="deployment_value", + preview_mode=config.Preview.PreviewMode.DEFAULT, + service_account="service_account_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + worker_pool="worker_pool_value", + error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + error_logs="error_logs_value", + logs="logs_value", + ) + ) + response = await client.get_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetPreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.Preview) + assert response.name == "name_value" + assert response.state == config.Preview.State.CREATING + assert response.deployment == "deployment_value" + assert response.preview_mode == config.Preview.PreviewMode.DEFAULT + assert response.service_account == "service_account_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.worker_pool == "worker_pool_value" + assert response.error_code == config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + assert response.build == "build_value" + assert response.error_logs == "error_logs_value" + assert response.logs == "logs_value" + + +@pytest.mark.asyncio +async def test_get_preview_async_from_dict(): + await test_get_preview_async(request_type=dict) + + +def test_get_preview_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetPreviewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + call.return_value = config.Preview() + client.get_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_preview_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = config.GetPreviewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Preview()) + await client.get_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_preview_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Preview() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_preview( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_preview_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_preview( + config.GetPreviewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_preview_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.Preview() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(config.Preview()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_preview( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_preview_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_preview( + config.GetPreviewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListPreviewsRequest, + dict, + ], +) +def test_list_previews(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListPreviewsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_previews(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListPreviewsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPreviewsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_previews_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + client.list_previews() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListPreviewsRequest() + + +@pytest.mark.asyncio +async def test_list_previews_async( + transport: str = "grpc_asyncio", request_type=config.ListPreviewsRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListPreviewsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_previews(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListPreviewsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPreviewsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_previews_async_from_dict(): + await test_list_previews_async(request_type=dict) + + +def test_list_previews_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListPreviewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + call.return_value = config.ListPreviewsResponse() + client.list_previews(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_previews_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListPreviewsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListPreviewsResponse() + ) + await client.list_previews(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_previews_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListPreviewsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_previews( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_previews_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_previews( + config.ListPreviewsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_previews_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListPreviewsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListPreviewsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_previews( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_previews_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_previews( + config.ListPreviewsRequest(), + parent="parent_value", + ) + + +def test_list_previews_pager(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_previews(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Preview) for i in results) + + +def test_list_previews_pages(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + RuntimeError, + ) + pages = list(client.list_previews(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_previews_async_pager(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_previews), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
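+ # Each element of side_effect is consumed by one page fetch; the trailing
+ # RuntimeError guards against the pager requesting more pages than supplied.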
+ call.side_effect = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_previews( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, config.Preview) for i in responses) + + +@pytest.mark.asyncio +async def test_list_previews_async_pages(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_previews), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_previews(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeletePreviewRequest, + dict, + ], +) +def test_delete_preview(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeletePreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_preview_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + client.delete_preview() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeletePreviewRequest() + + +@pytest.mark.asyncio +async def test_delete_preview_async( + transport: str = "grpc_asyncio", request_type=config.DeletePreviewRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeletePreviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_preview_async_from_dict(): + await test_delete_preview_async(request_type=dict) + + +def test_delete_preview_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeletePreviewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_preview_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeletePreviewRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_preview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_preview_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_preview( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_preview_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_preview( + config.DeletePreviewRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_preview_flattened_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_preview( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_preview_flattened_error_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_preview( + config.DeletePreviewRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportPreviewResultRequest, + dict, + ], +) +def test_export_preview_result(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.ExportPreviewResultResponse() + response = client.export_preview_result(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportPreviewResultRequest() + + # Establish that the response is the type that we expect. 
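+ # ExportPreviewResult is a unary call rather than a long-running operation,
+ # so the response is the message itself instead of an operation future.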
+ assert isinstance(response, config.ExportPreviewResultResponse) + + +def test_export_preview_result_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + client.export_preview_result() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportPreviewResultRequest() + + +@pytest.mark.asyncio +async def test_export_preview_result_async( + transport: str = "grpc_asyncio", request_type=config.ExportPreviewResultRequest +): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ExportPreviewResultResponse() + ) + response = await client.export_preview_result(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == config.ExportPreviewResultRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, config.ExportPreviewResultResponse) + + +@pytest.mark.asyncio +async def test_export_preview_result_async_from_dict(): + await test_export_preview_result_async(request_type=dict) + + +def test_export_preview_result_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ExportPreviewResultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + call.return_value = config.ExportPreviewResultResponse() + client.export_preview_result(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_preview_result_field_headers_async(): + client = ConfigAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ExportPreviewResultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ExportPreviewResultResponse() + ) + await client.export_preview_result(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -5031,20 +6361,1756 @@ async def test_export_lock_info_flattened_error_async(): dict, ], ) -def test_list_deployments_rest(request_type): +def test_list_deployments_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_deployments_rest_required_fields( + request_type=config.ListDeploymentsRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
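+ # Only the optional query parameters may remain unset at this point; "parent"
+ # is a path parameter and was already populated above.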
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_deployments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_deployments_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deployments_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_deployments" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_list_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.ListDeploymentsRequest.pb(config.ListDeploymentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.ListDeploymentsResponse.to_json( + config.ListDeploymentsResponse() + ) + + request = config.ListDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ListDeploymentsResponse() + + client.list_deployments( + 
request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_deployments_rest_bad_request( + transport: str = "rest", request_type=config.ListDeploymentsRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deployments(request) + + +def test_list_deployments_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployments( + config.ListDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_deployments_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Deployment) for i in results) + + pages = list(client.list_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetDeploymentRequest, + dict, + ], +) +def test_get_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Deployment( + name="name_value", + state=config.Deployment.State.CREATING, + latest_revision="latest_revision_value", + state_detail="state_detail_value", + error_code=config.Deployment.ErrorCode.REVISION_FAILED, + delete_build="delete_build_value", + delete_logs="delete_logs_value", + error_logs="error_logs_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + lock_state=config.Deployment.LockState.LOCKED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Deployment) + assert response.name == "name_value" + assert response.state == config.Deployment.State.CREATING + assert response.latest_revision == "latest_revision_value" + assert response.state_detail == "state_detail_value" + assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED + assert response.delete_build == "delete_build_value" + assert response.delete_logs == "delete_logs_value" + assert response.error_logs == "error_logs_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + assert response.lock_state == config.Deployment.LockState.LOCKED + + +def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.GetDeploymentRequest.pb(config.GetDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = config.Deployment.to_json(config.Deployment()) + + request = config.GetDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Deployment() + + client.get_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_deployment_rest_bad_request( + transport: str = "rest", request_type=config.GetDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deployment(request) + + +def test_get_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deployment( + config.GetDeploymentRequest(), + name="name_value", + ) + + +def test_get_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.CreateDeploymentRequest, + dict, + ], +) +def test_create_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": {}, + }, + "delete_build": "delete_build_value", + "delete_logs": "delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.CreateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) + + # Establish that the response is the type that we expect. 
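+ # create_deployment is a long-running method, so the REST client returns an
+ # api_core operation.Operation future whose .operation attribute carries the
+ # operations_pb2.Operation decoded from the faked JSON body.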
+ assert response.operation.name == "operations/spam" + + +def test_create_deployment_rest_required_fields( + request_type=config.CreateDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["deployment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "deploymentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == request_init["deployment_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["deploymentId"] = "deployment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "deployment_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == "deployment_id_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
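+ # The dict assigned to transcode.return_value mirrors what path_template.transcode()
+ # normally returns: 'uri', 'method' and 'query_params', plus 'body' for methods
+ # that send a request body.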
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_deployment(request) + + expected_params = [ + ( + "deploymentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deploymentId", + "requestId", + ) + ) + & set( + ( + "parent", + "deploymentId", + "deployment", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_create_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_create_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.CreateDeploymentRequest.pb(config.CreateDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.CreateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deployment_rest_bad_request( + transport: str = "rest", request_type=config.CreateDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
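+ # A bare 400 status is enough for the transport to raise core_exceptions.BadRequest;
+ # no error payload is required.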
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deployment(request) + + +def test_create_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_deployment( + config.CreateDeploymentRequest(), + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", + ) + + +def test_create_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.UpdateDeploymentRequest, + dict, + ], +) +def test_update_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "projects/sample1/locations/sample2/deployments/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": {}, + }, + "delete_build": "delete_build_value", + "delete_logs": "delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.UpdateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
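+ # proto-plus wrapper types expose their fields through .meta.fields, while plain
+ # protobuf classes expose them through DESCRIPTOR.fields; the hasattr check below
+ # distinguishes the two.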
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_deployment_rest_required_fields( + request_type=config.UpdateDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("deployment",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_update_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_update_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.UpdateDeploymentRequest.pb(config.UpdateDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.UpdateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_deployment_rest_bad_request( + transport: str = "rest", request_type=config.UpdateDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deployment(request) + + +def test_update_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deployment( + config.UpdateDeploymentRequest(), + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeleteDeploymentRequest, + dict, + ], +) +def test_delete_deployment_rest(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
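+ # delete_deployment is also long-running, so the faked response body is a
+ # JSON-serialized google.longrunning Operation rather than a Deployment resource.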
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deployment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_deployment_rest_required_fields( + request_type=config.DeleteDeploymentRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "delete_policy", + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deployment_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deletePolicy", + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_delete_deployment" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_delete_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = config.DeleteDeploymentRequest.pb(config.DeleteDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = config.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_deployment_rest_bad_request( + transport: str = "rest", request_type=config.DeleteDeploymentRequest +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deployment(request) + + +def test_delete_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deployment( + config.DeleteDeploymentRequest(), + name="name_value", + ) + + +def test_delete_deployment_rest_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListRevisionsRequest, + dict, + ], +) +def test_list_revisions_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ListDeploymentsResponse( + return_value = config.ListRevisionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -5053,22 +8119,20 @@ def test_list_deployments_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_deployments(request) + response = client.list_revisions(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeploymentsPager) + assert isinstance(response, pagers.ListRevisionsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_deployments_rest_required_fields( - request_type=config.ListDeploymentsRequest, -): +def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -5087,7 +8151,7 @@ def test_list_deployments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) + ).list_revisions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5096,7 +8160,7 @@ def test_list_deployments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) + ).list_revisions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -5119,7 +8183,7 @@ def test_list_deployments_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListDeploymentsResponse() + return_value = config.ListRevisionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5140,25 +8204,25 @@ def test_list_deployments_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_deployments(request) + response = client.list_revisions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_deployments_rest_unset_required_fields(): +def test_list_revisions_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_deployments._get_unset_required_fields({}) + unset_fields = transport.list_revisions._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -5173,7 +8237,7 @@ def test_list_deployments_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deployments_rest_interceptors(null_interceptor): +def test_list_revisions_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -5184,13 +8248,13 @@ def test_list_deployments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_deployments" + transports.ConfigRestInterceptor, "post_list_revisions" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_deployments" + transports.ConfigRestInterceptor, "pre_list_revisions" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ListDeploymentsRequest.pb(config.ListDeploymentsRequest()) + pb_message = config.ListRevisionsRequest.pb(config.ListRevisionsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5201,19 +8265,19 @@ def test_list_deployments_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.ListDeploymentsResponse.to_json( - config.ListDeploymentsResponse() + req.return_value._content = config.ListRevisionsResponse.to_json( + config.ListRevisionsResponse() ) - request = config.ListDeploymentsRequest() + request = config.ListRevisionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListDeploymentsResponse() + post.return_value = config.ListRevisionsResponse() - client.list_deployments( + client.list_revisions( request, metadata=[ ("key", "val"), @@ -5225,8 +8289,8 @@ def test_list_deployments_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_deployments_rest_bad_request( - transport: str = "rest", request_type=config.ListDeploymentsRequest +def test_list_revisions_rest_bad_request( + transport: str = "rest", request_type=config.ListRevisionsRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5234,7 +8298,7 @@ def 
test_list_deployments_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5246,10 +8310,10 @@ def test_list_deployments_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_deployments(request) + client.list_revisions(request) -def test_list_deployments_rest_flattened(): +def test_list_revisions_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5258,10 +8322,12 @@ def test_list_deployments_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListDeploymentsResponse() + return_value = config.ListRevisionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -5273,25 +8339,25 @@ def test_list_deployments_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_deployments(**mock_args) + client.list_revisions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" + "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions" % client.transport._host, args[1], ) -def test_list_deployments_rest_flattened_error(transport: str = "rest"): +def test_list_revisions_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5300,13 +8366,13 @@ def test_list_deployments_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_deployments( - config.ListDeploymentsRequest(), + client.list_revisions( + config.ListRevisionsRequest(), parent="parent_value", ) -def test_list_deployments_rest_pager(transport: str = "rest"): +def test_list_revisions_rest_pager(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5318,28 +8384,28 @@ def test_list_deployments_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - config.Deployment(), - config.Deployment(), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), ], next_page_token="abc", ), - config.ListDeploymentsResponse( - deployments=[], + config.ListRevisionsResponse( + revisions=[], next_page_token="def", ), - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), ], next_page_token="ghi", ), - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - config.Deployment(), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), ], ), ) @@ -5347,22 +8413,24 @@ def test_list_deployments_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) + response = tuple(config.ListRevisionsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } - pager = client.list_deployments(request=sample_request) + pager = client.list_revisions(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, config.Deployment) for i in results) + assert all(isinstance(i, config.Revision) for i in results) - pages = list(client.list_deployments(request=sample_request).pages) + pages = list(client.list_revisions(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -5370,68 +8438,68 @@ def test_list_deployments_rest_pager(transport: str = "rest"): @pytest.mark.parametrize( "request_type", [ - config.GetDeploymentRequest, + config.GetRevisionRequest, dict, ], ) -def test_get_deployment_rest(request_type): +def test_get_revision_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Deployment( + return_value = config.Revision( name="name_value", - state=config.Deployment.State.CREATING, - latest_revision="latest_revision_value", + action=config.Revision.Action.CREATE, + state=config.Revision.State.APPLYING, state_detail="state_detail_value", - error_code=config.Deployment.ErrorCode.REVISION_FAILED, - delete_build="delete_build_value", - delete_logs="delete_logs_value", + error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + logs="logs_value", error_logs="error_logs_value", - artifacts_gcs_bucket="artifacts_gcs_bucket_value", service_account="service_account_value", import_existing_resources=True, worker_pool="worker_pool_value", - lock_state=config.Deployment.LockState.LOCKED, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_deployment(request) + response = client.get_revision(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.Deployment) + assert isinstance(response, config.Revision) assert response.name == "name_value" - assert response.state == config.Deployment.State.CREATING - assert response.latest_revision == "latest_revision_value" + assert response.action == config.Revision.Action.CREATE + assert response.state == config.Revision.State.APPLYING assert response.state_detail == "state_detail_value" - assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED - assert response.delete_build == "delete_build_value" - assert response.delete_logs == "delete_logs_value" + assert ( + response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + ) + assert response.build == "build_value" + assert response.logs == "logs_value" assert response.error_logs == "error_logs_value" - assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" assert response.service_account == "service_account_value" assert response.import_existing_resources is True assert response.worker_pool == "worker_pool_value" - assert response.lock_state == config.Deployment.LockState.LOCKED -def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): +def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -5450,7 +8518,7 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) + ).get_revision._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5459,7 +8527,7 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) + ).get_revision._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5473,7 +8541,7 @@ def 
test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Deployment() + return_value = config.Revision() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5494,30 +8562,30 @@ def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRe response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_deployment(request) + response = client.get_revision(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_deployment_rest_unset_required_fields(): +def test_get_revision_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_deployment._get_unset_required_fields({}) + unset_fields = transport.get_revision._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deployment_rest_interceptors(null_interceptor): +def test_get_revision_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -5528,13 +8596,13 @@ def test_get_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_deployment" + transports.ConfigRestInterceptor, "post_get_revision" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_deployment" + transports.ConfigRestInterceptor, "pre_get_revision" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.GetDeploymentRequest.pb(config.GetDeploymentRequest()) + pb_message = config.GetRevisionRequest.pb(config.GetRevisionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5545,17 +8613,17 @@ def test_get_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Deployment.to_json(config.Deployment()) + req.return_value._content = config.Revision.to_json(config.Revision()) - request = config.GetDeploymentRequest() + request = config.GetRevisionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Deployment() + post.return_value = config.Revision() - client.get_deployment( + client.get_revision( request, metadata=[ ("key", "val"), @@ -5567,8 +8635,8 @@ def test_get_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_deployment_rest_bad_request( - transport: str = "rest", request_type=config.GetDeploymentRequest +def test_get_revision_rest_bad_request( + transport: str = "rest", 
request_type=config.GetRevisionRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5576,7 +8644,9 @@ def test_get_deployment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5588,10 +8658,10 @@ def test_get_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_deployment(request) + client.get_revision(request) -def test_get_deployment_rest_flattened(): +def test_get_revision_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5600,11 +8670,11 @@ def test_get_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Deployment() + return_value = config.Revision() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } # get truthy value for each flattened field @@ -5617,25 +8687,25 @@ def test_get_deployment_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_deployment(**mock_args) + client.get_revision(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" % client.transport._host, args[1], ) -def test_get_deployment_rest_flattened_error(transport: str = "rest"): +def test_get_revision_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5644,13 +8714,13 @@ def test_get_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_deployment( - config.GetDeploymentRequest(), + client.get_revision( + config.GetRevisionRequest(), name="name_value", ) -def test_get_deployment_rest_error(): +def test_get_revision_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5659,162 +8729,54 @@ def test_get_deployment_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.CreateDeploymentRequest, + config.GetResourceRequest, dict, ], ) -def test_create_deployment_rest(request_type): +def test_get_resource_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["deployment"] = { - "terraform_blueprint": { - "gcs_source": "gcs_source_value", - "git_source": { - "repo": "repo_value", - "directory": "directory_value", - "ref": "ref_value", - }, - "input_values": {}, - }, - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "latest_revision": "latest_revision_value", - "state_detail": "state_detail_value", - "error_code": 1, - "delete_results": { - "content": "content_value", - "artifacts": "artifacts_value", - "outputs": {}, - }, - "delete_build": "delete_build_value", - "delete_logs": "delete_logs_value", - "tf_errors": [ - { - "resource_address": "resource_address_value", - "http_response_code": 1928, - "error_description": "error_description_value", - "error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - } - ], - "error_logs": "error_logs_value", - "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", - "service_account": "service_account_value", - "import_existing_resources": True, - "worker_pool": "worker_pool_value", - "lock_state": 1, + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = config.CreateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource( + name="name_value", + intent=config.Resource.Intent.CREATE, + state=config.Resource.State.PLANNED, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_deployment(request) + response = client.get_resource(request) # Establish that the response is the type that we expect. 
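Every REST success test above fakes the HTTP layer the same way: build the expected proto-plus message, serialize its underlying protobuf to JSON, and attach the bytes to a bare requests.Response. A condensed sketch of that pattern, assuming the google-cloud-config package is installed so the config types used in these tests are importable:

from google.protobuf import json_format
from requests import Response

from google.cloud.config_v1.types import config


def fake_rest_response(message, status_code=200):
    # Serialize through the wrapped protobuf type, exactly as the tests do
    # (config.Resource.pb(...), config.Statefile.pb(...), and so on).
    pb = type(message).pb(message)
    response = Response()
    response.status_code = status_code
    response._content = json_format.MessageToJson(pb).encode("UTF-8")
    return response


# Example: the canned GetResource payload asserted in the test above.
canned = fake_rest_response(
    config.Resource(
        name="name_value",
        intent=config.Resource.Intent.CREATE,
        state=config.Resource.State.PLANNED,
    )
)
assert canned.status_code == 200
assert b'"name": "name_value"' in canned.content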
- assert response.operation.name == "operations/spam" + assert isinstance(response, config.Resource) + assert response.name == "name_value" + assert response.intent == config.Resource.Intent.CREATE + assert response.state == config.Resource.State.PLANNED -def test_create_deployment_rest_required_fields( - request_type=config.CreateDeploymentRequest, -): +def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" - request_init["deployment_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5826,37 +8788,24 @@ def test_create_deployment_rest_required_fields( ) # verify fields with default values are dropped - assert "deploymentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) + ).get_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "deploymentId" in jsonified_request - assert jsonified_request["deploymentId"] == request_init["deployment_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["deploymentId"] = "deployment_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "deployment_id", - "request_id", - ) - ) + ).get_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "deploymentId" in jsonified_request - assert jsonified_request["deploymentId"] == "deployment_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5865,7 +8814,7 @@ def test_create_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5877,57 +8826,39 @@ def test_create_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_deployment(request) + response = client.get_resource(request) - expected_params = [ - ( - "deploymentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_deployment_rest_unset_required_fields(): +def test_get_resource_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "deploymentId", - "requestId", - ) - ) - & set( - ( - "parent", - "deploymentId", - "deployment", - ) - ) - ) + unset_fields = transport.get_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_deployment_rest_interceptors(null_interceptor): +def test_get_resource_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -5938,15 +8869,13 @@ def test_create_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConfigRestInterceptor, "post_create_deployment" + transports.ConfigRestInterceptor, "post_get_resource" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_create_deployment" + transports.ConfigRestInterceptor, "pre_get_resource" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.CreateDeploymentRequest.pb(config.CreateDeploymentRequest()) + pb_message = config.GetResourceRequest.pb(config.GetResourceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5957,19 +8886,17 @@ def test_create_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = config.Resource.to_json(config.Resource()) - request = config.CreateDeploymentRequest() + request = config.GetResourceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = config.Resource() - client.create_deployment( + client.get_resource( request, metadata=[ ("key", "val"), @@ -5981,8 +8908,8 @@ def 
test_create_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_deployment_rest_bad_request( - transport: str = "rest", request_type=config.CreateDeploymentRequest +def test_get_resource_rest_bad_request( + transport: str = "rest", request_type=config.GetResourceRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5990,7 +8917,9 @@ def test_create_deployment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6002,10 +8931,10 @@ def test_create_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_deployment(request) + client.get_resource(request) -def test_create_deployment_rest_flattened(): +def test_get_resource_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6014,44 +8943,42 @@ def test_create_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - deployment_id="deployment_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_deployment(**mock_args) + client.get_resource(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" % client.transport._host, args[1], ) -def test_create_deployment_rest_flattened_error(transport: str = "rest"): +def test_get_resource_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6060,19 +8987,13 @@ def test_create_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
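The *_rest_unset_required_fields assertions above (and repeated for the later methods below) all take one shape: the left operand lists the method's optional query or body parameters, the right operand lists its required fields, and their intersection is what _get_unset_required_fields({}) is expected to report for an empty request. The set arithmetic, restated in plain Python with the values from the assertions above:

# GetResource: no optional parameters, so nothing intersects "name".
assert set(()) & set(("name",)) == set()

# CreateDeployment (removed hunk above): "deploymentId" is both a query
# parameter and a required field, so it is the only survivor.
assert set(("deploymentId", "requestId")) & set(
    ("parent", "deploymentId", "deployment")
) == {"deploymentId"}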
with pytest.raises(ValueError): - client.create_deployment( - config.CreateDeploymentRequest(), - parent="parent_value", - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - deployment_id="deployment_id_value", + client.get_resource( + config.GetResourceRequest(), + name="name_value", ) -def test_create_deployment_rest_error(): +def test_get_resource_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6081,11 +9002,11 @@ def test_create_deployment_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.UpdateDeploymentRequest, + config.ListResourcesRequest, dict, ], ) -def test_update_deployment_rest(request_type): +def test_list_resources_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6093,150 +9014,40 @@ def test_update_deployment_rest(request_type): # send a request that will satisfy transcoding request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request_init["deployment"] = { - "terraform_blueprint": { - "gcs_source": "gcs_source_value", - "git_source": { - "repo": "repo_value", - "directory": "directory_value", - "ref": "ref_value", - }, - "input_values": {}, - }, - "name": "projects/sample1/locations/sample2/deployments/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "latest_revision": "latest_revision_value", - "state_detail": "state_detail_value", - "error_code": 1, - "delete_results": { - "content": "content_value", - "artifacts": "artifacts_value", - "outputs": {}, - }, - "delete_build": "delete_build_value", - "delete_logs": "delete_logs_value", - "tf_errors": [ - { - "resource_address": "resource_address_value", - "http_response_code": 1928, - "error_description": "error_description_value", - "error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - } - ], - "error_logs": "error_logs_value", - "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", - "service_account": "service_account_value", - "import_existing_resources": True, - "worker_pool": "worker_pool_value", - "lock_state": 1, + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = config.UpdateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListResourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_deployment(request) + response = client.list_resources(request) # Establish that the response is the type that we expect. 
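The large blocks removed above (for create_deployment and for update_deployment) are the generated guard from gapic-generator-python issue 1748: they prune sub-fields of the sample "deployment" dict that the protobuf runtime installed at test time does not know about. The heart of that guard is telling proto-plus message types apart from raw protobuf ones; a compact restatement of the removed helper, assuming the same config module:

from google.cloud.config_v1.types import config


def get_message_fields(field):
    # Proto-plus message classes expose their fields via .meta.fields and
    # carry no protobuf DESCRIPTOR; raw protobuf classes do the opposite.
    if hasattr(field, "message") and field.message:
        if not hasattr(field.message, "DESCRIPTOR"):
            return list(field.message.meta.fields.values())
        return list(field.message.DESCRIPTOR.fields)
    return []


# The removed tests applied this to the "deployment" field of the request,
# building (field, subfield) pairs to compare against the sample dict.
deployment_field = config.CreateDeploymentRequest.meta.fields["deployment"]
runtime_nested_fields = [
    (field.name, nested.name)
    for field in get_message_fields(deployment_field)
    for nested in get_message_fields(field)
]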
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListResourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_deployment_rest_required_fields( - request_type=config.UpdateDeploymentRequest, -): +def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): transport_class = transports.ConfigRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6251,24 +9062,30 @@ def test_update_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) + ).list_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) + ).list_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6277,7 +9094,7 @@ def test_update_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListResourcesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6289,45 +9106,49 @@ def test_update_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_deployment(request) + response = client.list_resources(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_deployment_rest_unset_required_fields(): +def test_list_resources_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_deployment._get_unset_required_fields({}) + unset_fields = transport.list_resources._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - & set(("deployment",)) + & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deployment_rest_interceptors(null_interceptor): +def test_list_resources_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -6338,15 +9159,13 @@ def test_update_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConfigRestInterceptor, "post_update_deployment" + transports.ConfigRestInterceptor, "post_list_resources" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_update_deployment" + transports.ConfigRestInterceptor, "pre_list_resources" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.UpdateDeploymentRequest.pb(config.UpdateDeploymentRequest()) + pb_message = config.ListResourcesRequest.pb(config.ListResourcesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6357,19 +9176,19 @@ def test_update_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = config.ListResourcesResponse.to_json( + config.ListResourcesResponse() ) - request = config.UpdateDeploymentRequest() + request = config.ListResourcesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = config.ListResourcesResponse() - client.update_deployment( + client.list_resources( request, metadata=[ ("key", "val"), @@ -6381,8 +9200,8 @@ def test_update_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_update_deployment_rest_bad_request( - transport: str = "rest", request_type=config.UpdateDeploymentRequest +def test_list_resources_rest_bad_request( + transport: str = "rest", request_type=config.ListResourcesRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6391,7 +9210,7 @@ def test_update_deployment_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } request = request_type(**request_init) @@ -6404,10 +9223,10 @@ def test_update_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_deployment(request) + client.list_resources(request) -def test_update_deployment_rest_flattened(): +def test_list_resources_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6416,47 +9235,42 @@ def test_update_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListResourcesResponse() # get arguments that satisfy an http rule for this method sample_request = { - "deployment": { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } # get truthy value for each flattened field mock_args = dict( - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_deployment(**mock_args) + client.list_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources" % client.transport._host, args[1], ) -def test_update_deployment_rest_flattened_error(transport: str = "rest"): +def test_list_resources_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6465,65 +9279,122 @@ def test_update_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
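The required-fields tests above round-trip the request through request_type.pb(...) and json_format.MessageToJson, which is why they look for camelCase keys such as "pageSize" and "orderBy" rather than the proto field names. A small sketch of that conversion, assuming the same config types:

import json

from google.protobuf import json_format

from google.cloud.config_v1.types import config

request = config.ListResourcesRequest(
    parent="parent_value", page_size=5, order_by="name"
)
jsonified = json.loads(
    json_format.MessageToJson(config.ListResourcesRequest.pb(request))
)

# MessageToJson emits lowerCamelCase JSON names, hence "pageSize"/"orderBy"
# in the assertions above rather than the proto field names.
assert jsonified["parent"] == "parent_value"
assert jsonified["orderBy"] == "name"
assert "pageSize" in jsonified and "page_size" not in jsonified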
with pytest.raises(ValueError): - client.update_deployment( - config.UpdateDeploymentRequest(), - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_resources( + config.ListResourcesRequest(), + parent="parent_value", ) -def test_update_deployment_rest_error(): +def test_list_resources_rest_pager(transport: str = "rest"): client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + config.Resource(), + ], + next_page_token="abc", + ), + config.ListResourcesResponse( + resources=[], + next_page_token="def", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + ], + next_page_token="ghi", + ), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListResourcesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + + pager = client.list_resources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Resource) for i in results) + + pages = list(client.list_resources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - config.DeleteDeploymentRequest, + config.ExportDeploymentStatefileRequest, dict, ], ) -def test_delete_deployment_rest(request_type): +def test_export_deployment_statefile_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
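test_list_resources_rest_pager above drives pagination by queuing one canned HTTP response per page via side_effect; the pager keeps issuing requests until next_page_token comes back empty. A condensed sketch of a single pass over the same pages, assuming an installed google-cloud-config (the test itself queues the pages twice because it iterates the pager and then .pages):

from unittest import mock

from google.auth import credentials as ga_credentials
from requests import Response, Session

from google.cloud import config_v1
from google.cloud.config_v1.types import config

client = config_v1.ConfigClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)

pages = (
    config.ListResourcesResponse(
        resources=[config.Resource(), config.Resource(), config.Resource()],
        next_page_token="abc",
    ),
    config.ListResourcesResponse(resources=[], next_page_token="def"),
    config.ListResourcesResponse(resources=[config.Resource()], next_page_token="ghi"),
    config.ListResourcesResponse(resources=[config.Resource(), config.Resource()]),
)


def as_rest_response(page):
    response = Response()
    response.status_code = 200
    response._content = config.ListResourcesResponse.to_json(page).encode("UTF-8")
    return response


with mock.patch.object(Session, "request") as req:
    # One canned page per HTTP call, in order; the final page has no token.
    req.side_effect = [as_rest_response(page) for page in pages]
    parent = (
        "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4"
    )
    results = list(client.list_resources(request={"parent": parent}))

assert len(results) == 6
assert all(isinstance(item, config.Resource) for item in results)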
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Statefile( + signed_uri="signed_uri_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_deployment(request) + response = client.export_deployment_statefile(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" -def test_delete_deployment_rest_required_fields( - request_type=config.DeleteDeploymentRequest, +def test_export_deployment_statefile_rest_required_fields( + request_type=config.ExportDeploymentStatefileRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6538,29 +9409,21 @@ def test_delete_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "delete_policy", - "force", - "request_id", - ) - ) + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6569,7 +9432,7 @@ def test_delete_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Statefile() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6581,45 +9444,40 @@ def test_delete_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_deployment(request) + response = client.export_deployment_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_deployment_rest_unset_required_fields(): +def test_export_deployment_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "deletePolicy", - "force", - "requestId", - ) - ) - & set(("name",)) - ) + unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deployment_rest_interceptors(null_interceptor): +def test_export_deployment_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -6630,15 +9488,15 @@ def test_delete_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConfigRestInterceptor, "post_delete_deployment" + transports.ConfigRestInterceptor, "post_export_deployment_statefile" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_delete_deployment" + transports.ConfigRestInterceptor, "pre_export_deployment_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.DeleteDeploymentRequest.pb(config.DeleteDeploymentRequest()) + pb_message = config.ExportDeploymentStatefileRequest.pb( + config.ExportDeploymentStatefileRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6649,19 +9507,17 @@ def test_delete_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) + req.return_value._content = config.Statefile.to_json(config.Statefile()) - request = config.DeleteDeploymentRequest() + request = config.ExportDeploymentStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = config.Statefile() - client.delete_deployment( + client.export_deployment_statefile( request, metadata=[ ("key", "val"), @@ -6673,8 +9529,8 @@ def 
test_delete_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_deployment_rest_bad_request( - transport: str = "rest", request_type=config.DeleteDeploymentRequest +def test_export_deployment_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ExportDeploymentStatefileRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6682,7 +9538,7 @@ def test_delete_deployment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6694,67 +9550,10 @@ def test_delete_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_deployment(request) - - -def test_delete_deployment_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_deployment_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
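The *_rest_interceptors tests above patch the matching pre_*/post_* hooks on ConfigRestInterceptor and assert each fires exactly once per call. The same hooks are how user code can observe or rewrite REST traffic: subclass the interceptor and hand it to the REST transport. A minimal sketch, with the services.config.transports import path assumed from the usual generated layout:

from google.auth import credentials as ga_credentials

from google.cloud import config_v1
from google.cloud.config_v1.services.config import transports


class LoggingInterceptor(transports.ConfigRestInterceptor):
    # pre_* hooks receive (request, metadata) before the HTTP call is made
    # and must return them, possibly modified.
    def pre_export_deployment_statefile(self, request, metadata):
        print("exporting statefile for", request.parent)
        return request, metadata

    # post_* hooks receive the decoded response and must return it.
    def post_export_deployment_statefile(self, response):
        print("signed uri:", response.signed_uri)
        return response


transport = transports.ConfigRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = config_v1.ConfigClient(transport=transport)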
- with pytest.raises(ValueError): - client.delete_deployment( - config.DeleteDeploymentRequest(), - name="name_value", - ) + client.export_deployment_statefile(request) -def test_delete_deployment_rest_error(): +def test_export_deployment_statefile_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6763,46 +9562,48 @@ def test_delete_deployment_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ListRevisionsRequest, + config.ExportRevisionStatefileRequest, dict, ], ) -def test_list_revisions_rest(request_type): +def test_export_revision_statefile_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListRevisionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = config.Statefile( + signed_uri="signed_uri_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_revisions(request) + response = client.export_revision_statefile(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRevisionsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" -def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): +def test_export_revision_statefile_rest_required_fields( + request_type=config.ExportRevisionStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} @@ -6821,7 +9622,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_revisions._get_unset_required_fields(jsonified_request) + ).export_revision_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6830,16 +9631,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_revisions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).export_revision_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6853,7 +9645,7 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListRevisionsResponse() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6865,49 +9657,40 @@ def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_revisions(request) + response = client.export_revision_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_revisions_rest_unset_required_fields(): +def test_export_revision_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_revisions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_revisions_rest_interceptors(null_interceptor): +def test_export_revision_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -6918,13 +9701,15 @@ def test_list_revisions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_revisions" + transports.ConfigRestInterceptor, "post_export_revision_statefile" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_revisions" + transports.ConfigRestInterceptor, "pre_export_revision_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ListRevisionsRequest.pb(config.ListRevisionsRequest()) + pb_message = config.ExportRevisionStatefileRequest.pb( + config.ExportRevisionStatefileRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6935,19 +9720,17 @@ def test_list_revisions_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = config.ListRevisionsResponse.to_json( - config.ListRevisionsResponse() - ) + req.return_value._content = config.Statefile.to_json(config.Statefile()) - request = config.ListRevisionsRequest() + request = config.ExportRevisionStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListRevisionsResponse() + post.return_value = config.Statefile() - client.list_revisions( + client.export_revision_statefile( request, metadata=[ ("key", "val"), @@ -6959,8 +9742,8 @@ def test_list_revisions_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_revisions_rest_bad_request( - transport: str = "rest", request_type=config.ListRevisionsRequest +def test_export_revision_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ExportRevisionStatefileRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6968,7 +9751,9 @@ def test_list_revisions_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6980,200 +9765,63 @@ def test_list_revisions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_revisions(request) - - -def test_list_revisions_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.ListRevisionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_revisions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions" - % client.transport._host, - args[1], - ) - - -def test_list_revisions_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_revisions( - config.ListRevisionsRequest(), - parent="parent_value", - ) + client.export_revision_statefile(request) -def test_list_revisions_rest_pager(transport: str = "rest"): +def test_export_revision_statefile_rest_error(): client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListRevisionsResponse( - revisions=[ - config.Revision(), - config.Revision(), - config.Revision(), - ], - next_page_token="abc", - ), - config.ListRevisionsResponse( - revisions=[], - next_page_token="def", - ), - config.ListRevisionsResponse( - revisions=[ - config.Revision(), - ], - next_page_token="ghi", - ), - config.ListRevisionsResponse( - revisions=[ - config.Revision(), - config.Revision(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListRevisionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } - - pager = client.list_revisions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Revision) for i in results) - - pages = list(client.list_revisions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - config.GetRevisionRequest, + config.ImportStatefileRequest, dict, ], ) -def test_get_revision_rest(request_type): +def test_import_statefile_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Revision( - name="name_value", - action=config.Revision.Action.CREATE, - state=config.Revision.State.APPLYING, - state_detail="state_detail_value", - error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, - build="build_value", - logs="logs_value", - error_logs="error_logs_value", - service_account="service_account_value", - import_existing_resources=True, - worker_pool="worker_pool_value", + return_value = config.Statefile( + signed_uri="signed_uri_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_revision(request) + response = client.import_statefile(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.Revision) - assert response.name == "name_value" - assert response.action == config.Revision.Action.CREATE - assert response.state == config.Revision.State.APPLYING - assert response.state_detail == "state_detail_value" - assert ( - response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED - ) - assert response.build == "build_value" - assert response.logs == "logs_value" - assert response.error_logs == "error_logs_value" - assert response.service_account == "service_account_value" - assert response.import_existing_resources is True - assert response.worker_pool == "worker_pool_value" + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" -def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): +def test_import_statefile_rest_required_fields( + request_type=config.ImportStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7188,21 +9836,24 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_revision._get_unset_required_fields(jsonified_request) + ).import_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_revision._get_unset_required_fields(jsonified_request) + ).import_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7211,7 +9862,7 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Revision() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7223,39 +9874,48 @@ def test_get_revision_rest_required_fields(request_type=config.GetRevisionReques pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_revision(request) + response = client.import_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_revision_rest_unset_required_fields(): +def test_import_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_revision._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_statefile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "lockId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_revision_rest_interceptors(null_interceptor): +def test_import_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -7266,13 +9926,13 @@ def test_get_revision_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_revision" + transports.ConfigRestInterceptor, "post_import_statefile" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_revision" + transports.ConfigRestInterceptor, "pre_import_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.GetRevisionRequest.pb(config.GetRevisionRequest()) + pb_message = config.ImportStatefileRequest.pb(config.ImportStatefileRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7283,17 +9943,17 @@ def test_get_revision_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Revision.to_json(config.Revision()) + req.return_value._content = config.Statefile.to_json(config.Statefile()) - request = config.GetRevisionRequest() + request = config.ImportStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Revision() + post.return_value = config.Statefile() - client.get_revision( + client.import_statefile( request, metadata=[ ("key", "val"), @@ -7305,8 +9965,8 @@ def 
test_get_revision_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_revision_rest_bad_request( - transport: str = "rest", request_type=config.GetRevisionRequest +def test_import_statefile_rest_bad_request( + transport: str = "rest", request_type=config.ImportStatefileRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7314,9 +9974,7 @@ def test_get_revision_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7328,10 +9986,10 @@ def test_get_revision_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_revision(request) + client.import_statefile(request) -def test_get_revision_rest_flattened(): +def test_import_statefile_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7340,16 +9998,17 @@ def test_get_revision_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Revision() + return_value = config.Statefile() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + "parent": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + lock_id=725, ) mock_args.update(sample_request) @@ -7357,25 +10016,25 @@ def test_get_revision_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_revision(**mock_args) + client.import_statefile(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" + "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState" % client.transport._host, args[1], ) -def test_get_revision_rest_flattened_error(transport: str = "rest"): +def test_import_statefile_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7384,13 +10043,14 @@ def test_get_revision_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_revision( - config.GetRevisionRequest(), - name="name_value", + client.import_statefile( + config.ImportStatefileRequest(), + parent="parent_value", + lock_id=725, ) -def test_get_revision_rest_error(): +def test_import_statefile_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7399,54 +10059,46 @@ def test_get_revision_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.GetResourceRequest, + config.DeleteStatefileRequest, dict, ], ) -def test_get_resource_rest(request_type): +def test_delete_statefile_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Resource( - name="name_value", - intent=config.Resource.Intent.CREATE, - state=config.Resource.State.PLANNED, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_resource(request) + response = client.delete_statefile(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Resource) - assert response.name == "name_value" - assert response.intent == config.Resource.Intent.CREATE - assert response.state == config.Resource.State.PLANNED + assert response is None -def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): +def test_delete_statefile_rest_required_fields( + request_type=config.DeleteStatefileRequest, +): transport_class = transports.ConfigRestTransport request_init = {} request_init["name"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7461,21 +10113,24 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource._get_unset_required_fields(jsonified_request) + ).delete_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource._get_unset_required_fields(jsonified_request) + ).delete_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7484,7 +10139,7 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Resource() + return_value = None # Mock the http request call within the method and fake a response. 
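    # Note: delete_statefile appears to return no payload, so the faked HTTP body below is an
    # empty string and the client is expected to hand back None; the required-field checks above
    # cover both "name" and the numeric "lock_id" (serialized as "lockId" in the JSON request).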
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7496,39 +10151,45 @@ def test_get_resource_rest_required_fields(request_type=config.GetResourceReques pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_resource(request) + response = client.delete_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_resource_rest_unset_required_fields(): +def test_delete_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_resource._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_statefile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_resource_rest_interceptors(null_interceptor): +def test_delete_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -7539,13 +10200,10 @@ def test_get_resource_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource" - ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_resource" + transports.ConfigRestInterceptor, "pre_delete_statefile" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = config.GetResourceRequest.pb(config.GetResourceRequest()) + pb_message = config.DeleteStatefileRequest.pb(config.DeleteStatefileRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7556,17 +10214,15 @@ def test_get_resource_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Resource.to_json(config.Resource()) - request = config.GetResourceRequest() + request = config.DeleteStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Resource() - client.get_resource( + client.delete_statefile( request, metadata=[ ("key", "val"), @@ -7575,11 +10231,10 @@ def test_get_resource_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_resource_rest_bad_request( - transport: str = "rest", request_type=config.GetResourceRequest +def test_delete_statefile_rest_bad_request( + transport: str = "rest", request_type=config.DeleteStatefileRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7587,9 
+10242,7 @@ def test_get_resource_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7601,10 +10254,10 @@ def test_get_resource_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_resource(request) + client.delete_statefile(request) -def test_get_resource_rest_flattened(): +def test_delete_statefile_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7613,11 +10266,11 @@ def test_get_resource_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Resource() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field @@ -7629,26 +10282,24 @@ def test_get_resource_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_resource(**mock_args) + client.delete_statefile(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" + "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" % client.transport._host, args[1], ) -def test_get_resource_rest_flattened_error(transport: str = "rest"): +def test_delete_statefile_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7657,13 +10308,13 @@ def test_get_resource_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_resource( - config.GetResourceRequest(), + client.delete_statefile( + config.DeleteStatefileRequest(), name="name_value", ) -def test_get_resource_rest_error(): +def test_delete_statefile_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7672,52 +10323,45 @@ def test_get_resource_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ListResourcesRequest, + config.LockDeploymentRequest, dict, ], ) -def test_list_resources_rest(request_type): +def test_lock_deployment_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_resources(request) + response = client.lock_deployment(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListResourcesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.operation.name == "operations/spam" -def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): +def test_lock_deployment_rest_required_fields( + request_type=config.LockDeploymentRequest, +): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7732,30 +10376,21 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resources._get_unset_required_fields(jsonified_request) + ).lock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resources._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).lock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7764,7 +10399,7 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7776,49 +10411,37 @@ def test_list_resources_rest_required_fields(request_type=config.ListResourcesRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_resources(request) + response = client.lock_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_resources_rest_unset_required_fields(): +def test_lock_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_resources._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.lock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_resources_rest_interceptors(null_interceptor): +def test_lock_deployment_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -7829,13 +10452,15 @@ def test_list_resources_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resources" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_lock_deployment" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_resources" + transports.ConfigRestInterceptor, "pre_lock_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ListResourcesRequest.pb(config.ListResourcesRequest()) + pb_message = config.LockDeploymentRequest.pb(config.LockDeploymentRequest()) 
transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7846,19 +10471,19 @@ def test_list_resources_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.ListResourcesResponse.to_json( - config.ListResourcesResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = config.ListResourcesRequest() + request = config.LockDeploymentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListResourcesResponse() + post.return_value = operations_pb2.Operation() - client.list_resources( + client.lock_deployment( request, metadata=[ ("key", "val"), @@ -7870,8 +10495,8 @@ def test_list_resources_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_resources_rest_bad_request( - transport: str = "rest", request_type=config.ListResourcesRequest +def test_lock_deployment_rest_bad_request( + transport: str = "rest", request_type=config.LockDeploymentRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7879,9 +10504,7 @@ def test_list_resources_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7893,10 +10516,10 @@ def test_list_resources_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_resources(request) + client.lock_deployment(request) -def test_list_resources_rest_flattened(): +def test_lock_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7905,42 +10528,40 @@ def test_list_resources_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_resources(**mock_args) + client.lock_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources" + "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" % client.transport._host, args[1], ) -def test_list_resources_rest_flattened_error(transport: str = "rest"): +def test_lock_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7949,122 +10570,61 @@ def test_list_resources_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_resources( - config.ListResourcesRequest(), - parent="parent_value", + client.lock_deployment( + config.LockDeploymentRequest(), + name="name_value", ) -def test_list_resources_rest_pager(transport: str = "rest"): +def test_lock_deployment_rest_error(): client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListResourcesResponse( - resources=[ - config.Resource(), - config.Resource(), - config.Resource(), - ], - next_page_token="abc", - ), - config.ListResourcesResponse( - resources=[], - next_page_token="def", - ), - config.ListResourcesResponse( - resources=[ - config.Resource(), - ], - next_page_token="ghi", - ), - config.ListResourcesResponse( - resources=[ - config.Resource(), - config.Resource(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListResourcesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } - - pager = client.list_resources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Resource) for i in results) - - pages = list(client.list_resources(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - config.ExportDeploymentStatefileRequest, + config.UnlockDeploymentRequest, dict, ], ) -def test_export_deployment_statefile_rest(request_type): +def test_unlock_deployment_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
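    # Note: unlock_deployment (like lock_deployment above) is exercised as a long-running method:
    # the mocked response carries a serialized operations_pb2.Operation rather than a config.*
    # message, and the assertion checks response.operation.name instead of per-field values.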
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile( - signed_uri="signed_uri_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_deployment_statefile(request) + response = client.unlock_deployment(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + assert response.operation.name == "operations/spam" -def test_export_deployment_statefile_rest_required_fields( - request_type=config.ExportDeploymentStatefileRequest, +def test_unlock_deployment_rest_required_fields( + request_type=config.UnlockDeploymentRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8079,21 +10639,24 @@ def test_export_deployment_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_deployment_statefile._get_unset_required_fields(jsonified_request) + ).unlock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_deployment_statefile._get_unset_required_fields(jsonified_request) + ).unlock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8102,7 +10665,7 @@ def test_export_deployment_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8122,32 +10685,37 @@ def test_export_deployment_statefile_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_deployment_statefile(request) + response = client.unlock_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_deployment_statefile_rest_unset_required_fields(): +def test_unlock_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.unlock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_deployment_statefile_rest_interceptors(null_interceptor): +def test_unlock_deployment_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -8158,15 +10726,15 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_deployment_statefile" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_unlock_deployment" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_deployment_statefile" + transports.ConfigRestInterceptor, "pre_unlock_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ExportDeploymentStatefileRequest.pb( - config.ExportDeploymentStatefileRequest() - ) + pb_message = config.UnlockDeploymentRequest.pb(config.UnlockDeploymentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8177,17 +10745,19 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Statefile.to_json(config.Statefile()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = config.ExportDeploymentStatefileRequest() + request = config.UnlockDeploymentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() + post.return_value = operations_pb2.Operation() - client.export_deployment_statefile( + client.unlock_deployment( request, metadata=[ ("key", "val"), @@ -8199,8 +10769,8 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_deployment_statefile_rest_bad_request( - transport: str = "rest", request_type=config.ExportDeploymentStatefileRequest +def 
test_unlock_deployment_rest_bad_request( + transport: str = "rest", request_type=config.UnlockDeploymentRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8208,22 +10778,81 @@ def test_export_deployment_statefile_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_deployment_statefile(request) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.unlock_deployment(request) + + +def test_unlock_deployment_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + lock_id=725, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.unlock_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" + % client.transport._host, + args[1], + ) + + +def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.unlock_deployment( + config.UnlockDeploymentRequest(), + name="name_value", + lock_id=725, + ) -def test_export_deployment_statefile_rest_error(): +def test_unlock_deployment_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8232,52 +10861,58 @@ def test_export_deployment_statefile_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ExportRevisionStatefileRequest, + config.ExportLockInfoRequest, dict, ], ) -def test_export_revision_statefile_rest(request_type): +def test_export_lock_info_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile( - signed_uri="signed_uri_value", + return_value = config.LockInfo( + lock_id=725, + operation="operation_value", + info="info_value", + who="who_value", + version="version_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_revision_statefile(request) + response = client.export_lock_info(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + assert isinstance(response, config.LockInfo) + assert response.lock_id == 725 + assert response.operation == "operation_value" + assert response.info == "info_value" + assert response.who == "who_value" + assert response.version == "version_value" -def test_export_revision_statefile_rest_required_fields( - request_type=config.ExportRevisionStatefileRequest, +def test_export_lock_info_rest_required_fields( + request_type=config.ExportLockInfoRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8292,21 +10927,21 @@ def test_export_revision_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_revision_statefile._get_unset_required_fields(jsonified_request) + ).export_lock_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_revision_statefile._get_unset_required_fields(jsonified_request) + ).export_lock_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8315,7 +10950,7 @@ def test_export_revision_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = config.LockInfo() # Mock the http request call within the method and fake a response. 
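    # Note: export_lock_info transcodes to an HTTP GET, so the transcode stub below switches
    # "method" to "get" and no longer attaches a "body" entry, unlike the POST-based statefile
    # methods earlier in this file.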
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8327,40 +10962,39 @@ def test_export_revision_statefile_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_revision_statefile(request) + response = client.export_lock_info(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_revision_statefile_rest_unset_required_fields(): +def test_export_lock_info_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.export_lock_info._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_revision_statefile_rest_interceptors(null_interceptor): +def test_export_lock_info_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -8371,15 +11005,13 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_revision_statefile" + transports.ConfigRestInterceptor, "post_export_lock_info" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_revision_statefile" + transports.ConfigRestInterceptor, "pre_export_lock_info" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ExportRevisionStatefileRequest.pb( - config.ExportRevisionStatefileRequest() - ) + pb_message = config.ExportLockInfoRequest.pb(config.ExportLockInfoRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8390,17 +11022,17 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Statefile.to_json(config.Statefile()) + req.return_value._content = config.LockInfo.to_json(config.LockInfo()) - request = config.ExportRevisionStatefileRequest() + request = config.ExportLockInfoRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() + post.return_value = config.LockInfo() - client.export_revision_statefile( + client.export_lock_info( request, metadata=[ ("key", "val"), @@ -8412,8 +11044,8 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_export_revision_statefile_rest_bad_request( - transport: str = "rest", request_type=config.ExportRevisionStatefileRequest +def test_export_lock_info_rest_bad_request( + transport: str = "rest", request_type=config.ExportLockInfoRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8421,9 +11053,7 @@ def test_export_revision_statefile_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8435,10 +11065,69 @@ def test_export_revision_statefile_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_revision_statefile(request) + client.export_lock_info(request) -def test_export_revision_statefile_rest_error(): +def test_export_lock_info_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.LockInfo() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.LockInfo.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.export_lock_info(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" + % client.transport._host, + args[1], + ) + + +def test_export_lock_info_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.export_lock_info( + config.ExportLockInfoRequest(), + name="name_value", + ) + + +def test_export_lock_info_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8447,51 +11136,156 @@ def test_export_revision_statefile_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ImportStatefileRequest, + config.CreatePreviewRequest, dict, ], ) -def test_import_statefile_rest(request_type): +def test_create_preview_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["preview"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "state": 1, + "deployment": "deployment_value", + "preview_mode": 1, + "service_account": "service_account_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "worker_pool": "worker_pool_value", + "error_code": 1, + "error_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "build": "build_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": {}, + } + ], + "error_logs": "error_logs_value", + "preview_artifacts": { + "content": "content_value", + "artifacts": "artifacts_value", + }, + "logs": "logs_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.CreatePreviewRequest.meta.fields["preview"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
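        # Rough sketch of the two branches below (assuming the usual proto-plus vs. protobuf split):
        #   proto-plus field:  field.message.meta.fields.values()
        #   protobuf field:    field.message.DESCRIPTOR.fields
        # The result feeds runtime_nested_fields, which is used to prune request_init["preview"]
        # subfields that the installed dependency version does not know about.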
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["preview"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["preview"][field])): + del request_init["preview"][field][i][subfield] + else: + del request_init["preview"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile( - signed_uri="signed_uri_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_statefile(request) + response = client.create_preview(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + assert response.operation.name == "operations/spam" -def test_import_statefile_rest_required_fields( - request_type=config.ImportStatefileRequest, -): +def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} request_init["parent"] = "" - request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8506,24 +11300,28 @@ def test_import_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_statefile._get_unset_required_fields(jsonified_request) + ).create_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_statefile._get_unset_required_fields(jsonified_request) + ).create_preview._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "preview_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8532,7 +11330,7 @@ def test_import_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
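    # Note: create_preview is also surfaced as a long-running operation; only "parent" and
    # "preview" are treated as required body fields here, while preview_id/request_id show up
    # as optional query parameters ("previewId", "requestId") in the unset-fields checks.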
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8552,40 +11350,42 @@ def test_import_statefile_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_statefile(request) + response = client.create_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_statefile_rest_unset_required_fields(): +def test_create_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_statefile._get_unset_required_fields({}) + unset_fields = transport.create_preview._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "previewId", + "requestId", + ) + ) & set( ( "parent", - "lockId", + "preview", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_statefile_rest_interceptors(null_interceptor): +def test_create_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -8596,13 +11396,15 @@ def test_import_statefile_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_import_statefile" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ConfigRestInterceptor, "post_create_preview" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_import_statefile" + transports.ConfigRestInterceptor, "pre_create_preview" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ImportStatefileRequest.pb(config.ImportStatefileRequest()) + pb_message = config.CreatePreviewRequest.pb(config.CreatePreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8613,17 +11415,19 @@ def test_import_statefile_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.Statefile.to_json(config.Statefile()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = config.ImportStatefileRequest() + request = config.CreatePreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() + post.return_value = operations_pb2.Operation() - client.import_statefile( + client.create_preview( request, metadata=[ ("key", "val"), @@ -8635,8 +11439,8 @@ def test_import_statefile_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_statefile_rest_bad_request( - transport: str = "rest", request_type=config.ImportStatefileRequest +def test_create_preview_rest_bad_request( + transport: str = "rest", request_type=config.CreatePreviewRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8644,7 +11448,7 @@ def 
test_import_statefile_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8656,10 +11460,10 @@ def test_import_statefile_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_statefile(request) + client.create_preview(request) -def test_import_statefile_rest_flattened(): +def test_create_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8668,43 +11472,42 @@ def test_import_statefile_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Statefile() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( parent="parent_value", - lock_id=725, + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.import_statefile(**mock_args) + client.create_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, args[1], ) -def test_import_statefile_rest_flattened_error(transport: str = "rest"): +def test_create_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8713,14 +11516,18 @@ def test_import_statefile_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.import_statefile( - config.ImportStatefileRequest(), + client.create_preview( + config.CreatePreviewRequest(), parent="parent_value", - lock_id=725, + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), ) -def test_import_statefile_rest_error(): +def test_create_preview_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8729,46 +11536,68 @@ def test_import_statefile_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.DeleteStatefileRequest, + config.GetPreviewRequest, dict, ], ) -def test_delete_statefile_rest(request_type): +def test_get_preview_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = config.Preview( + name="name_value", + state=config.Preview.State.CREATING, + deployment="deployment_value", + preview_mode=config.Preview.PreviewMode.DEFAULT, + service_account="service_account_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + worker_pool="worker_pool_value", + error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + error_logs="error_logs_value", + logs="logs_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = config.Preview.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_statefile(request) + response = client.get_preview(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, config.Preview) + assert response.name == "name_value" + assert response.state == config.Preview.State.CREATING + assert response.deployment == "deployment_value" + assert response.preview_mode == config.Preview.PreviewMode.DEFAULT + assert response.service_account == "service_account_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.worker_pool == "worker_pool_value" + assert response.error_code == config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + assert response.build == "build_value" + assert response.error_logs == "error_logs_value" + assert response.logs == "logs_value" -def test_delete_statefile_rest_required_fields( - request_type=config.DeleteStatefileRequest, -): +def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} request_init["name"] = "" - request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8783,24 +11612,21 @@ def test_delete_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_statefile._get_unset_required_fields(jsonified_request) + ).get_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_statefile._get_unset_required_fields(jsonified_request) + ).get_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8809,7 +11635,7 @@ def test_delete_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = config.Preview() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8821,45 +11647,39 @@ def test_delete_statefile_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = config.Preview.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_statefile(request) + response = client.get_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_statefile_rest_unset_required_fields(): +def test_get_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_statefile._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "lockId", - ) - ) - ) + unset_fields = transport.get_preview._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_statefile_rest_interceptors(null_interceptor): +def test_get_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -8870,10 +11690,13 @@ def test_delete_statefile_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "pre_delete_statefile" + transports.ConfigRestInterceptor, "post_get_preview" + ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_preview" ) as pre: pre.assert_not_called() - pb_message = config.DeleteStatefileRequest.pb(config.DeleteStatefileRequest()) + post.assert_not_called() + pb_message = config.GetPreviewRequest.pb(config.GetPreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8884,15 +11707,17 @@ def test_delete_statefile_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = config.Preview.to_json(config.Preview()) - request = config.DeleteStatefileRequest() + request = config.GetPreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = config.Preview() - client.delete_statefile( + client.get_preview( request, metadata=[ ("key", "val"), @@ -8901,10 +11726,11 @@ def test_delete_statefile_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_statefile_rest_bad_request( - transport: str = "rest", request_type=config.DeleteStatefileRequest +def test_get_preview_rest_bad_request( + transport: str = "rest", request_type=config.GetPreviewRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8912,7 +11738,7 @@ def 
test_delete_statefile_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8924,10 +11750,10 @@ def test_delete_statefile_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_statefile(request) + client.get_preview(request) -def test_delete_statefile_rest_flattened(): +def test_get_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8936,12 +11762,10 @@ def test_delete_statefile_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = config.Preview() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} # get truthy value for each flattened field mock_args = dict( @@ -8952,24 +11776,25 @@ def test_delete_statefile_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = config.Preview.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_statefile(**mock_args) + client.get_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, args[1], ) -def test_delete_statefile_rest_flattened_error(transport: str = "rest"): +def test_get_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8978,13 +11803,13 @@ def test_delete_statefile_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_statefile( - config.DeleteStatefileRequest(), + client.get_preview( + config.GetPreviewRequest(), name="name_value", ) -def test_delete_statefile_rest_error(): +def test_get_preview_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8993,45 +11818,50 @@ def test_delete_statefile_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.LockDeploymentRequest, + config.ListPreviewsRequest, dict, ], ) -def test_lock_deployment_rest(request_type): +def test_list_previews_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListPreviewsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.lock_deployment(request) + response = client.list_previews(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.ListPreviewsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_lock_deployment_rest_required_fields( - request_type=config.LockDeploymentRequest, -): +def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequest): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9046,21 +11876,30 @@ def test_lock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lock_deployment._get_unset_required_fields(jsonified_request) + ).list_previews._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lock_deployment._get_unset_required_fields(jsonified_request) + ).list_previews._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9069,7 +11908,7 @@ def test_lock_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListPreviewsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9081,37 +11920,49 @@ def test_lock_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.lock_deployment(request) + response = client.list_previews(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_lock_deployment_rest_unset_required_fields(): +def test_list_previews_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.lock_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_previews._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lock_deployment_rest_interceptors(null_interceptor): +def test_list_previews_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -9122,15 +11973,13 @@ def test_lock_deployment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ConfigRestInterceptor, "post_lock_deployment" + transports.ConfigRestInterceptor, "post_list_previews" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_lock_deployment" + transports.ConfigRestInterceptor, "pre_list_previews" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.LockDeploymentRequest.pb(config.LockDeploymentRequest()) + pb_message = config.ListPreviewsRequest.pb(config.ListPreviewsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9141,19 +11990,19 @@ def 
test_lock_deployment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = config.ListPreviewsResponse.to_json( + config.ListPreviewsResponse() ) - request = config.LockDeploymentRequest() + request = config.ListPreviewsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = config.ListPreviewsResponse() - client.lock_deployment( + client.list_previews( request, metadata=[ ("key", "val"), @@ -9165,8 +12014,8 @@ def test_lock_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_lock_deployment_rest_bad_request( - transport: str = "rest", request_type=config.LockDeploymentRequest +def test_list_previews_rest_bad_request( + transport: str = "rest", request_type=config.ListPreviewsRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9174,7 +12023,7 @@ def test_lock_deployment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9186,10 +12035,10 @@ def test_lock_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.lock_deployment(request) + client.list_previews(request) -def test_lock_deployment_rest_flattened(): +def test_list_previews_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9198,40 +12047,39 @@ def test_lock_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListPreviewsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.lock_deployment(**mock_args) + client.list_previews(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, args[1], ) -def test_lock_deployment_rest_flattened_error(transport: str = "rest"): +def test_list_previews_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9240,33 +12088,88 @@ def test_lock_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.lock_deployment( - config.LockDeploymentRequest(), - name="name_value", + client.list_previews( + config.ListPreviewsRequest(), + parent="parent_value", ) -def test_lock_deployment_rest_error(): +def test_list_previews_rest_pager(transport: str = "rest"): client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListPreviewsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_previews(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Preview) for i in results) + + pages = list(client.list_previews(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - config.UnlockDeploymentRequest, + config.DeletePreviewRequest, dict, ], ) -def test_unlock_deployment_rest(request_type): +def test_delete_preview_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -9281,20 +12184,17 @@ def test_unlock_deployment_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.unlock_deployment(request) + response = client.delete_preview(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_unlock_deployment_rest_required_fields( - request_type=config.UnlockDeploymentRequest, -): +def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} request_init["name"] = "" - request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9309,24 +12209,23 @@ def test_unlock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).unlock_deployment._get_unset_required_fields(jsonified_request) + ).delete_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).unlock_deployment._get_unset_required_fields(jsonified_request) + ).delete_preview._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9347,10 +12246,9 @@ def test_unlock_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -9360,32 +12258,24 @@ def test_unlock_deployment_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.unlock_deployment(request) + response = client.delete_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_unlock_deployment_rest_unset_required_fields(): +def test_delete_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.unlock_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "lockId", - ) - ) - ) + unset_fields = transport.delete_preview._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_unlock_deployment_rest_interceptors(null_interceptor): +def test_delete_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), 
@@ -9398,13 +12288,13 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ConfigRestInterceptor, "post_unlock_deployment" + transports.ConfigRestInterceptor, "post_delete_preview" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_unlock_deployment" + transports.ConfigRestInterceptor, "pre_delete_preview" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.UnlockDeploymentRequest.pb(config.UnlockDeploymentRequest()) + pb_message = config.DeletePreviewRequest.pb(config.DeletePreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9419,7 +12309,7 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = config.UnlockDeploymentRequest() + request = config.DeletePreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -9427,7 +12317,7 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.unlock_deployment( + client.delete_preview( request, metadata=[ ("key", "val"), @@ -9439,8 +12329,8 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_unlock_deployment_rest_bad_request( - transport: str = "rest", request_type=config.UnlockDeploymentRequest +def test_delete_preview_rest_bad_request( + transport: str = "rest", request_type=config.DeletePreviewRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9448,7 +12338,7 @@ def test_unlock_deployment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9460,10 +12350,10 @@ def test_unlock_deployment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.unlock_deployment(request) + client.delete_preview(request) -def test_unlock_deployment_rest_flattened(): +def test_delete_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9475,14 +12365,11 @@ def test_unlock_deployment_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} # get truthy value for each flattened field mock_args = dict( name="name_value", - lock_id=725, ) mock_args.update(sample_request) @@ -9493,20 +12380,19 @@ def test_unlock_deployment_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.unlock_deployment(**mock_args) + client.delete_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, args[1], ) -def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): +def test_delete_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9515,14 +12401,13 @@ def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.unlock_deployment( - config.UnlockDeploymentRequest(), + client.delete_preview( + config.DeletePreviewRequest(), name="name_value", - lock_id=725, ) -def test_unlock_deployment_rest_error(): +def test_delete_preview_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9531,58 +12416,47 @@ def test_unlock_deployment_rest_error(): @pytest.mark.parametrize( "request_type", [ - config.ExportLockInfoRequest, + config.ExportPreviewResultRequest, dict, ], ) -def test_export_lock_info_rest(request_type): +def test_export_preview_result_rest(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.LockInfo( - lock_id=725, - operation="operation_value", - info="info_value", - who="who_value", - version="version_value", - ) + return_value = config.ExportPreviewResultResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) + return_value = config.ExportPreviewResultResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_lock_info(request) + response = client.export_preview_result(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.LockInfo) - assert response.lock_id == 725 - assert response.operation == "operation_value" - assert response.info == "info_value" - assert response.who == "who_value" - assert response.version == "version_value" + assert isinstance(response, config.ExportPreviewResultResponse) -def test_export_lock_info_rest_required_fields( - request_type=config.ExportLockInfoRequest, +def test_export_preview_result_rest_required_fields( + request_type=config.ExportPreviewResultRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9597,21 +12471,21 @@ def test_export_lock_info_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_lock_info._get_unset_required_fields(jsonified_request) + ).export_preview_result._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_lock_info._get_unset_required_fields(jsonified_request) + ).export_preview_result._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9620,7 +12494,7 @@ def test_export_lock_info_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.LockInfo() + return_value = config.ExportPreviewResultResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9632,39 +12506,40 @@ def test_export_lock_info_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) + return_value = config.ExportPreviewResultResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_lock_info(request) + response = client.export_preview_result(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_lock_info_rest_unset_required_fields(): +def test_export_preview_result_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_lock_info._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.export_preview_result._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_lock_info_rest_interceptors(null_interceptor): +def test_export_preview_result_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -9675,13 +12550,15 @@ def test_export_lock_info_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_lock_info" + transports.ConfigRestInterceptor, "post_export_preview_result" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_lock_info" + transports.ConfigRestInterceptor, "pre_export_preview_result" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = config.ExportLockInfoRequest.pb(config.ExportLockInfoRequest()) + pb_message = config.ExportPreviewResultRequest.pb( + config.ExportPreviewResultRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9692,17 +12569,19 @@ def test_export_lock_info_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = config.LockInfo.to_json(config.LockInfo()) + req.return_value._content = config.ExportPreviewResultResponse.to_json( + config.ExportPreviewResultResponse() + ) - request = config.ExportLockInfoRequest() + request = config.ExportPreviewResultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.LockInfo() + post.return_value = config.ExportPreviewResultResponse() - client.export_lock_info( + client.export_preview_result( request, metadata=[ ("key", "val"), @@ -9714,8 +12593,8 @@ def test_export_lock_info_rest_interceptors(null_interceptor): post.assert_called_once() -def 
test_export_lock_info_rest_bad_request( - transport: str = "rest", request_type=config.ExportLockInfoRequest +def test_export_preview_result_rest_bad_request( + transport: str = "rest", request_type=config.ExportPreviewResultRequest ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9723,7 +12602,7 @@ def test_export_lock_info_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9735,69 +12614,10 @@ def test_export_lock_info_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_lock_info(request) - - -def test_export_lock_info_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.LockInfo() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.export_lock_info(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" - % client.transport._host, - args[1], - ) - - -def test_export_lock_info_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.export_lock_info( - config.ExportLockInfoRequest(), - name="name_value", - ) + client.export_preview_result(request) -def test_export_lock_info_rest_error(): +def test_export_preview_result_rest_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9958,6 +12778,11 @@ def test_config_base_transport(): "lock_deployment", "unlock_deployment", "export_lock_info", + "create_preview", + "get_preview", + "list_previews", + "delete_preview", + "export_preview_result", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -10292,6 +13117,21 @@ def test_config_client_transport_session_collision(transport_name): session1 = client1.transport.export_lock_info._session session2 = client2.transport.export_lock_info._session assert session1 != session2 + session1 = client1.transport.create_preview._session + session2 = client2.transport.create_preview._session + assert session1 != session2 + session1 = client1.transport.get_preview._session + session2 = client2.transport.get_preview._session + assert session1 != session2 + session1 = client1.transport.list_previews._session + session2 = client2.transport.list_previews._session + assert session1 != session2 + session1 = client1.transport.delete_preview._session + session2 = client2.transport.delete_preview._session + assert session1 != session2 + session1 = client1.transport.export_preview_result._session + session2 = client2.transport.export_preview_result._session + assert session1 != session2 def test_config_grpc_transport_channel(): @@ -10474,12 +13314,38 @@ def test_parse_deployment_path(): assert expected == actual -def test_resource_path(): +def test_preview_path(): project = "cuttlefish" location = "mussel" - deployment = "winkle" - revision = "nautilus" - resource = "scallop" + preview = "winkle" + expected = "projects/{project}/locations/{location}/previews/{preview}".format( + project=project, + location=location, + preview=preview, + ) + actual = ConfigClient.preview_path(project, location, preview) + assert expected == actual + + +def test_parse_preview_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "preview": "abalone", + } + path = ConfigClient.preview_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigClient.parse_preview_path(path) + assert expected == actual + + +def test_resource_path(): + project = "squid" + location = "clam" + deployment = "whelk" + revision = "octopus" + resource = "oyster" expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}/resources/{resource}".format( project=project, location=location, @@ -10495,11 +13361,11 @@ def test_resource_path(): def test_parse_resource_path(): expected = { - "project": "abalone", - "location": "squid", - "deployment": "clam", - "revision": "whelk", - "resource": "octopus", + "project": "nudibranch", + "location": "cuttlefish", + "deployment": "mussel", + "revision": "winkle", + "resource": "nautilus", } path = ConfigClient.resource_path(**expected) @@ -10509,10 +13375,10 @@ def test_parse_resource_path(): def test_revision_path(): - project = "oyster" - location = "nudibranch" - deployment = "cuttlefish" - revision = "mussel" + project = "scallop" + location = "abalone" + deployment = "squid" + revision = "clam" expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}".format( project=project, location=location, @@ -10525,10 +13391,10 @@ def test_revision_path(): def test_parse_revision_path(): expected = { - "project": "winkle", - "location": "nautilus", - "deployment": "scallop", - "revision": "abalone", + "project": "whelk", + "location": "octopus", + "deployment": "oyster", + "revision": "nudibranch", } path = ConfigClient.revision_path(**expected) @@ -10538,8 +13404,8 @@ def test_parse_revision_path(): def test_service_account_path(): - project = "squid" - service_account = "clam" + project = "cuttlefish" + service_account = "mussel" expected = "projects/{project}/serviceAccounts/{service_account}".format( project=project, service_account=service_account, @@ -10550,8 +13416,8 @@ def test_service_account_path(): def test_parse_service_account_path(): expected = { - "project": "whelk", - "service_account": "octopus", + "project": "winkle", + "service_account": "nautilus", } path = ConfigClient.service_account_path(**expected) @@ -10561,9 +13427,9 @@ def test_parse_service_account_path(): def test_worker_pool_path(): - project = "oyster" - location = "nudibranch" - worker_pool = "cuttlefish" + project = "scallop" + location = "abalone" + worker_pool = "squid" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -10577,9 +13443,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "mussel", - "location": "winkle", - "worker_pool": "nautilus", + "project": "clam", + "location": "whelk", + "worker_pool": "octopus", } path = ConfigClient.worker_pool_path(**expected) @@ -10589,7 +13455,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -10599,7 +13465,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "nudibranch", } path = ConfigClient.common_billing_account_path(**expected) @@ -10609,7 +13475,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "squid" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -10619,7 +13485,7 @@ def test_common_folder_path(): def 
test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "mussel", } path = ConfigClient.common_folder_path(**expected) @@ -10629,7 +13495,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "whelk" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -10639,7 +13505,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "nautilus", } path = ConfigClient.common_organization_path(**expected) @@ -10649,7 +13515,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "oyster" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -10659,7 +13525,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "abalone", } path = ConfigClient.common_project_path(**expected) @@ -10669,8 +13535,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -10681,8 +13547,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "whelk", + "location": "octopus", } path = ConfigClient.common_location_path(**expected)