diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py index 550d56ca1cc1..f4a35071ac18 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py @@ -59,10 +59,12 @@ Aspect, AspectSource, AspectType, + CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, CreateEntryRequest, CreateEntryTypeRequest, + CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, DeleteEntryRequest, @@ -76,6 +78,8 @@ GetEntryGroupRequest, GetEntryRequest, GetEntryTypeRequest, + GetMetadataJobRequest, + ImportItem, ListAspectTypesRequest, ListAspectTypesResponse, ListEntriesRequest, @@ -84,7 +88,10 @@ ListEntryGroupsResponse, ListEntryTypesRequest, ListEntryTypesResponse, + ListMetadataJobsRequest, + ListMetadataJobsResponse, LookupEntryRequest, + MetadataJob, SearchEntriesRequest, SearchEntriesResponse, SearchEntriesResult, @@ -102,6 +109,10 @@ ListContentResponse, UpdateContentRequest, ) +from google.cloud.dataplex_v1.types.data_discovery import ( + DataDiscoveryResult, + DataDiscoverySpec, +) from google.cloud.dataplex_v1.types.data_profile import ( DataProfileResult, DataProfileSpec, @@ -259,10 +270,12 @@ "Aspect", "AspectSource", "AspectType", + "CancelMetadataJobRequest", "CreateAspectTypeRequest", "CreateEntryGroupRequest", "CreateEntryRequest", "CreateEntryTypeRequest", + "CreateMetadataJobRequest", "DeleteAspectTypeRequest", "DeleteEntryGroupRequest", "DeleteEntryRequest", @@ -275,6 +288,8 @@ "GetEntryGroupRequest", "GetEntryRequest", "GetEntryTypeRequest", + "GetMetadataJobRequest", + "ImportItem", "ListAspectTypesRequest", "ListAspectTypesResponse", "ListEntriesRequest", @@ -283,7 +298,10 @@ "ListEntryGroupsResponse", "ListEntryTypesRequest", "ListEntryTypesResponse", + "ListMetadataJobsRequest", + "ListMetadataJobsResponse", "LookupEntryRequest", + "MetadataJob", "SearchEntriesRequest", "SearchEntriesResponse", "SearchEntriesResult", @@ -299,6 +317,8 @@ "ListContentRequest", "ListContentResponse", "UpdateContentRequest", + "DataDiscoveryResult", + "DataDiscoverySpec", "DataProfileResult", "DataProfileSpec", "DataQualityColumnResult", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py index dd79fdad215c..558c8aab67c5 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.3.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py index d4fbe71a5618..304e760fd71c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py @@ -35,10 +35,12 @@ Aspect, AspectSource, AspectType, + CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, CreateEntryRequest, CreateEntryTypeRequest, + CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, DeleteEntryRequest, @@ -52,6 +54,8 @@ GetEntryGroupRequest, GetEntryRequest, GetEntryTypeRequest, + GetMetadataJobRequest, + ImportItem, ListAspectTypesRequest, ListAspectTypesResponse, ListEntriesRequest, @@ -60,7 +64,10 @@ ListEntryGroupsResponse, ListEntryTypesRequest, ListEntryTypesResponse, + ListMetadataJobsRequest, + ListMetadataJobsResponse, LookupEntryRequest, + MetadataJob, SearchEntriesRequest, SearchEntriesResponse, SearchEntriesResult, @@ -78,6 +85,7 @@ ListContentResponse, UpdateContentRequest, ) +from .types.data_discovery import DataDiscoveryResult, DataDiscoverySpec from .types.data_profile import DataProfileResult, DataProfileSpec from .types.data_quality import ( DataQualityColumnResult, @@ -220,6 +228,7 @@ "Asset", "AssetStatus", "CancelJobRequest", + "CancelMetadataJobRequest", "CatalogServiceClient", "Content", "ContentServiceClient", @@ -236,12 +245,15 @@ "CreateEntryTypeRequest", "CreateEnvironmentRequest", "CreateLakeRequest", + "CreateMetadataJobRequest", "CreatePartitionRequest", "CreateTaskRequest", "CreateZoneRequest", "DataAccessSpec", "DataAttribute", "DataAttributeBinding", + "DataDiscoveryResult", + "DataDiscoverySpec", "DataProfileResult", "DataProfileSpec", "DataQualityColumnResult", @@ -302,10 +314,12 @@ "GetEnvironmentRequest", "GetJobRequest", "GetLakeRequest", + "GetMetadataJobRequest", "GetPartitionRequest", "GetTaskRequest", "GetZoneRequest", "GovernanceEvent", + "ImportItem", "Job", "JobEvent", "Lake", @@ -342,6 +356,8 @@ "ListLakeActionsRequest", "ListLakesRequest", "ListLakesResponse", + "ListMetadataJobsRequest", + "ListMetadataJobsResponse", "ListPartitionsRequest", "ListPartitionsResponse", "ListSessionsRequest", @@ -352,6 +368,7 @@ "ListZonesRequest", "ListZonesResponse", "LookupEntryRequest", + "MetadataJob", "MetadataServiceClient", "OperationMetadata", "Partition", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json index dcc696790241..9fb1150241c8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "CatalogServiceClient", "rpcs": { + "CancelMetadataJob": { + "methods": [ + "cancel_metadata_job" + ] + }, "CreateAspectType": { "methods": [ "create_aspect_type" @@ -30,6 +35,11 @@ "create_entry_type" ] }, + "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, "DeleteAspectType": { "methods": [ "delete_aspect_type" @@ -70,6 +80,11 @@ "get_entry_type" ] }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, "ListAspectTypes": { "methods": [ "list_aspect_types" @@ -90,6 +105,11 @@ "list_entry_types" ] }, + "ListMetadataJobs": { + "methods": [ + 
"list_metadata_jobs" + ] + }, "LookupEntry": { "methods": [ "lookup_entry" @@ -125,6 +145,11 @@ "grpc-async": { "libraryClient": "CatalogServiceAsyncClient", "rpcs": { + "CancelMetadataJob": { + "methods": [ + "cancel_metadata_job" + ] + }, "CreateAspectType": { "methods": [ "create_aspect_type" @@ -145,6 +170,11 @@ "create_entry_type" ] }, + "CreateMetadataJob": { + "methods": [ + "create_metadata_job" + ] + }, "DeleteAspectType": { "methods": [ "delete_aspect_type" @@ -185,6 +215,11 @@ "get_entry_type" ] }, + "GetMetadataJob": { + "methods": [ + "get_metadata_job" + ] + }, "ListAspectTypes": { "methods": [ "list_aspect_types" @@ -205,6 +240,11 @@ "list_entry_types" ] }, + "ListMetadataJobs": { + "methods": [ + "list_metadata_jobs" + ] + }, "LookupEntry": { "methods": [ "lookup_entry" diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index dd79fdad215c..558c8aab67c5 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.3.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py index 76bad2927899..f5523aa8de39 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -62,10 +62,10 @@ class CatalogServiceAsyncClient: """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entry and Aspect which - collectively allow a data administrator to organize, manage, - secure and catalog data across their organization located across - cloud projects in a variety of storage systems including Cloud + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud Storage and BigQuery. """ @@ -86,6 +86,8 @@ class CatalogServiceAsyncClient: parse_entry_group_path = staticmethod(CatalogServiceClient.parse_entry_group_path) entry_type_path = staticmethod(CatalogServiceClient.entry_type_path) parse_entry_type_path = staticmethod(CatalogServiceClient.parse_entry_type_path) + metadata_job_path = staticmethod(CatalogServiceClient.metadata_job_path) + parse_metadata_job_path = staticmethod(CatalogServiceClient.parse_metadata_job_path) common_billing_account_path = staticmethod( CatalogServiceClient.common_billing_account_path ) @@ -287,7 +289,7 @@ async def create_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates an EntryType + r"""Creates an EntryType. .. code-block:: python @@ -322,17 +324,17 @@ async def sample_create_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]]): - The request object. Create EntryType Request + The request object. Create EntryType Request. 
parent (:class:`str`): Required. The resource name of the EntryType, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): - Required. EntryType Resource + Required. EntryType Resource. This corresponds to the ``entry_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -424,7 +426,7 @@ async def update_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates a EntryType resource. + r"""Updates an EntryType. .. code-block:: python @@ -457,9 +459,9 @@ async def sample_update_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]]): - The request object. Update EntryType Request + The request object. Update EntryType Request. entry_type (:class:`google.cloud.dataplex_v1.types.EntryType`): - Required. EntryType Resource + Required. EntryType Resource. This corresponds to the ``entry_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -550,7 +552,7 @@ async def delete_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a EntryType resource. + r"""Deletes an EntryType. .. code-block:: python @@ -584,7 +586,7 @@ async def sample_delete_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]]): - The request object. Delele EntryType Request + The request object. Delele EntryType Request. name (:class:`str`): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -713,7 +715,7 @@ async def sample_list_entry_types(): Required. The resource name of the EntryType location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -726,7 +728,7 @@ async def sample_list_entry_types(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesAsyncPager: - List EntryTypes response + List EntryTypes response. Iterating over this object will yield results and resolve additional pages @@ -799,7 +801,7 @@ async def get_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.EntryType: - r"""Retrieves a EntryType resource. + r"""Gets an EntryType. .. code-block:: python @@ -829,7 +831,7 @@ async def sample_get_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]]): - The request object. Get EntryType request + The request object. Get EntryType request. name (:class:`str`): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. 
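The docstring updates above cover the async EntryType methods. A minimal sketch of calling one of them, assuming placeholder project, location, and EntryType IDs (none of these values come from the diff itself):

.. code-block:: python

    import asyncio

    from google.cloud import dataplex_v1


    async def get_entry_type_example():
        client = dataplex_v1.CatalogServiceAsyncClient()

        # The EntryType name follows the documented format; every ID below
        # is a placeholder.
        entry_type = await client.get_entry_type(
            name="projects/my-project/locations/us-central1/entryTypes/my-entry-type",
        )
        print(entry_type.name)


    asyncio.run(get_entry_type_example())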
@@ -906,7 +908,7 @@ async def create_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates an AspectType + r"""Creates an AspectType. .. code-block:: python @@ -946,17 +948,17 @@ async def sample_create_aspect_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]]): - The request object. Create AspectType Request + The request object. Create AspectType Request. parent (:class:`str`): Required. The resource name of the AspectType, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. aspect_type (:class:`google.cloud.dataplex_v1.types.AspectType`): - Required. AspectType Resource + Required. AspectType Resource. This corresponds to the ``aspect_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -975,9 +977,9 @@ async def sample_create_aspect_type(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` Aspect Type is a template for creating Aspects, and represents the - JSON-schema for a given Entry, e.g., BigQuery Table - Schema. + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1048,7 +1050,7 @@ async def update_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates a AspectType resource. + r"""Updates an AspectType. .. code-block:: python @@ -1107,9 +1109,9 @@ async def sample_update_aspect_type(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` Aspect Type is a template for creating Aspects, and represents the - JSON-schema for a given Entry, e.g., BigQuery Table - Schema. + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1179,7 +1181,7 @@ async def delete_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a AspectType resource. + r"""Deletes an AspectType. .. code-block:: python @@ -1213,7 +1215,7 @@ async def sample_delete_aspect_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]]): - The request object. Delele AspectType Request + The request object. Delele AspectType Request. name (:class:`str`): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. 
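The AspectType hunks that follow describe an AspectType as a template carrying the JSON-schema for a given Entry. A minimal sketch of creating one with the synchronous client, loosely modeled on the generated samples; the parent, IDs, and metadata template values are placeholders and assumptions, not taken from the diff:

.. code-block:: python

    from google.cloud import dataplex_v1

    client = dataplex_v1.CatalogServiceClient()

    # An AspectType carries the JSON-schema (metadata template) used when
    # attaching Aspects to Entries, for example a BigQuery table schema.
    aspect_type = dataplex_v1.AspectType()
    aspect_type.metadata_template.name = "my-template"      # placeholder
    aspect_type.metadata_template.type_ = "record"          # placeholder

    operation = client.create_aspect_type(
        parent="projects/my-project/locations/us-central1",
        aspect_type=aspect_type,
        aspect_type_id="my-aspect-type",
    )

    # create_aspect_type returns a long-running operation; result() blocks
    # until the AspectType is created.
    print(operation.result().name)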
@@ -1337,12 +1339,12 @@ async def sample_list_aspect_types(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]]): - The request object. List AspectTypes request + The request object. List AspectTypes request. parent (:class:`str`): Required. The resource name of the AspectType location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1355,7 +1357,7 @@ async def sample_list_aspect_types(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesAsyncPager: - List AspectTypes response + List AspectTypes response. Iterating over this object will yield results and resolve additional pages @@ -1428,7 +1430,7 @@ async def get_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.AspectType: - r"""Retrieves a AspectType resource. + r"""Gets an AspectType. .. code-block:: python @@ -1458,7 +1460,7 @@ async def sample_get_aspect_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]]): - The request object. Get AspectType request + The request object. Get AspectType request. name (:class:`str`): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1474,10 +1476,10 @@ async def sample_get_aspect_type(): Returns: google.cloud.dataplex_v1.types.AspectType: - Aspect Type is a template for - creating Aspects, and represents the - JSON-schema for a given Entry, e.g., - BigQuery Table Schema. + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1537,7 +1539,7 @@ async def create_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates an EntryGroup + r"""Creates an EntryGroup. .. code-block:: python @@ -1572,7 +1574,7 @@ async def sample_create_entry_group(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]]): - The request object. Create EntryGroup Request + The request object. Create EntryGroup Request. parent (:class:`str`): Required. The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} @@ -1582,7 +1584,7 @@ async def sample_create_entry_group(): on the ``request`` instance; if ``request`` is provided, this should not be set. entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): - Required. EntryGroup Resource + Required. EntryGroup Resource. This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1675,7 +1677,7 @@ async def update_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates a EntryGroup resource. + r"""Updates an EntryGroup. .. code-block:: python @@ -1708,9 +1710,9 @@ async def sample_update_entry_group(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]]): - The request object. Update EntryGroup Request + The request object. 
Update EntryGroup Request. entry_group (:class:`google.cloud.dataplex_v1.types.EntryGroup`): - Required. EntryGroup Resource + Required. EntryGroup Resource. This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1802,7 +1804,7 @@ async def delete_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Deletes a EntryGroup resource. + r"""Deletes an EntryGroup. .. code-block:: python @@ -1836,7 +1838,7 @@ async def sample_delete_entry_group(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]]): - The request object. Delele EntryGroup Request + The request object. Delete EntryGroup Request. name (:class:`str`): Required. The resource name of the EntryGroup: ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. @@ -1965,7 +1967,7 @@ async def sample_list_entry_groups(): Required. The resource name of the entryGroup location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1978,7 +1980,7 @@ async def sample_list_entry_groups(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsAsyncPager: - List ListEntryGroups response. + List entry groups response. Iterating over this object will yield results and resolve additional pages @@ -2051,7 +2053,7 @@ async def get_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.EntryGroup: - r"""Retrieves a EntryGroup resource. + r"""Gets an EntryGroup. .. code-block:: python @@ -2193,7 +2195,7 @@ async def sample_create_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]]): - The request object. + The request object. Create Entry request. parent (:class:`str`): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. @@ -2210,22 +2212,23 @@ async def sample_create_entry(): Required. Entry identifier. It has to be unique within an Entry Group. - Entries corresponding to Google Cloud resources use - Entry ID format based on Full Resource Names - (https://cloud.google.com/apis/design/resource_names#full_resource_name). - The format is a Full Resource Name of the resource - without the prefix double slashes in the API Service - Name part of Full Resource Name. This allows retrieval - of entries using their associated resource name. + Entries corresponding to Google Cloud resources use an + Entry ID format based on `full resource + names `__. + The format is a full resource name of the resource + without the prefix double slashes in the API service + name part of the full resource name. This allows + retrieval of entries using their associated resource + name. - For example if the Full Resource Name of a resource is + For example, if the full resource name of a resource is ``//library.googleapis.com/shelves/shelf1/books/book2``, then the suggested entry_id is ``library.googleapis.com/shelves/shelf1/books/book2``. It is also suggested to follow the same convention for - entries corresponding to resources from other providers - or systems than Google Cloud. 
+ entries corresponding to resources from providers or + systems other than Google Cloud. The maximum size of the field is 4000 characters. @@ -2241,7 +2244,7 @@ async def sample_create_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2338,7 +2341,7 @@ async def sample_update_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]]): - The request object. + The request object. Update Entry request. entry (:class:`google.cloud.dataplex_v1.types.Entry`): Required. Entry resource. This corresponds to the ``entry`` field @@ -2348,8 +2351,8 @@ async def sample_update_entry(): Optional. Mask of fields to update. To update Aspects, the update_mask must contain the value "aspects". - If the update_mask is empty, all modifiable fields - present in the request will be updated. + If the update_mask is empty, the service will update all + modifiable fields present in the request. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2363,7 +2366,7 @@ async def sample_update_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2456,7 +2459,7 @@ async def sample_delete_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]]): - The request object. + The request object. Delete Entry request. name (:class:`str`): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -2473,7 +2476,7 @@ async def sample_delete_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2532,7 +2535,7 @@ async def list_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEntriesAsyncPager: - r"""Lists entries within an entry group. + r"""Lists Entries within an EntryGroup. .. code-block:: python @@ -2563,7 +2566,7 @@ async def sample_list_entries(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]]): - The request object. + The request object. List Entries request. parent (:class:`str`): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. @@ -2579,6 +2582,8 @@ async def sample_list_entries(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesAsyncPager: + List Entries response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -2650,7 +2655,12 @@ async def get_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.Entry: - r"""Gets a single entry. + r"""Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. .. code-block:: python @@ -2680,7 +2690,7 @@ async def sample_get_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]]): - The request object. + The request object. Get Entry request. 
name (:class:`str`): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -2697,7 +2707,7 @@ async def sample_get_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2755,7 +2765,13 @@ async def lookup_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.Entry: - r"""Looks up a single entry. + r"""Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. .. code-block:: python @@ -2786,7 +2802,8 @@ async def sample_lookup_entry(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]]): - The request object. + The request object. Lookup Entry request using + permissions in the source system. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2796,7 +2813,7 @@ async def sample_lookup_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2842,7 +2859,8 @@ async def search_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchEntriesAsyncPager: - r"""Searches for entries matching given query and scope. + r"""Searches for Entries matching the given query and + scope. .. code-block:: python @@ -2962,6 +2980,479 @@ async def sample_search_entries(): # Done; return the response. return response + async def create_metadata_job( + self, + request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + metadata_job: Optional[catalog.MetadataJob] = None, + metadata_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]]): + The request object. Create metadata job request. + parent (:class:`str`): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job (:class:`google.cloud.dataplex_v1.types.MetadataJob`): + Required. The metadata job resource. + This corresponds to the ``metadata_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job_id (:class:`str`): + Optional. The metadata job ID. If not provided, a unique + ID is generated with the prefix ``metadata-job-``. + + This corresponds to the ``metadata_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.MetadataJob` A + metadata job resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metadata_job, metadata_job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateMetadataJobRequest): + request = catalog.CreateMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if metadata_job is not None: + request.metadata_job = metadata_job + if metadata_job_id is not None: + request.metadata_job_id = metadata_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_metadata_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + catalog.MetadataJob, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_metadata_job( + self, + request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.MetadataJob: + r"""Gets a metadata job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]]): + The request object. Get metadata job request. + name (:class:`str`): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.MetadataJob: + A metadata job resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetMetadataJobRequest): + request = catalog.GetMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_metadata_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_metadata_jobs( + self, + request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetadataJobsAsyncPager: + r"""Lists metadata jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]]): + The request object. List metadata jobs request. + parent (:class:`str`): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager: + List metadata jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListMetadataJobsRequest): + request = catalog.ListMetadataJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_metadata_jobs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMetadataJobsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_metadata_job( + self, + request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_metadata_job(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]]): + The request object. Cancel metadata job request. + name (:class:`str`): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CancelMetadataJobRequest): + request = catalog.CancelMetadataJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_metadata_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 6c5aeb8272cf..9ad66817729a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -104,10 +104,10 @@ def get_transport_class( class CatalogServiceClient(metaclass=CatalogServiceClientMeta): """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entry and Aspect which - collectively allow a data administrator to organize, manage, - secure and catalog data across their organization located across - cloud projects in a variety of storage systems including Cloud + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud Storage and BigQuery. 
""" @@ -291,6 +291,30 @@ def parse_entry_type_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def metadata_job_path( + project: str, + location: str, + metadataJob: str, + ) -> str: + """Returns a fully-qualified metadata_job string.""" + return ( + "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format( + project=project, + location=location, + metadataJob=metadataJob, + ) + ) + + @staticmethod + def parse_metadata_job_path(path: str) -> Dict[str, str]: + """Parses a metadata_job path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/metadataJobs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -740,7 +764,7 @@ def create_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates an EntryType + r"""Creates an EntryType. .. code-block:: python @@ -775,17 +799,17 @@ def sample_create_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.CreateEntryTypeRequest, dict]): - The request object. Create EntryType Request + The request object. Create EntryType Request. parent (str): Required. The resource name of the EntryType, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource + Required. EntryType Resource. This corresponds to the ``entry_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -874,7 +898,7 @@ def update_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates a EntryType resource. + r"""Updates an EntryType. .. code-block:: python @@ -907,9 +931,9 @@ def sample_update_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.UpdateEntryTypeRequest, dict]): - The request object. Update EntryType Request + The request object. Update EntryType Request. entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource + Required. EntryType Resource. This corresponds to the ``entry_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -997,7 +1021,7 @@ def delete_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a EntryType resource. + r"""Deletes an EntryType. .. code-block:: python @@ -1031,7 +1055,7 @@ def sample_delete_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]): - The request object. Delele EntryType Request + The request object. Delele EntryType Request. name (str): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -1157,7 +1181,7 @@ def sample_list_entry_types(): Required. The resource name of the EntryType location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1170,7 +1194,7 @@ def sample_list_entry_types(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryTypesPager: - List EntryTypes response + List EntryTypes response. Iterating over this object will yield results and resolve additional pages @@ -1240,7 +1264,7 @@ def get_entry_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.EntryType: - r"""Retrieves a EntryType resource. + r"""Gets an EntryType. .. code-block:: python @@ -1270,7 +1294,7 @@ def sample_get_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.GetEntryTypeRequest, dict]): - The request object. Get EntryType request + The request object. Get EntryType request. name (str): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -1344,7 +1368,7 @@ def create_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates an AspectType + r"""Creates an AspectType. .. code-block:: python @@ -1384,17 +1408,17 @@ def sample_create_aspect_type(): Args: request (Union[google.cloud.dataplex_v1.types.CreateAspectTypeRequest, dict]): - The request object. Create AspectType Request + The request object. Create AspectType Request. parent (str): Required. The resource name of the AspectType, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource + Required. AspectType Resource. This corresponds to the ``aspect_type`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1413,9 +1437,9 @@ def sample_create_aspect_type(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` Aspect Type is a template for creating Aspects, and represents the - JSON-schema for a given Entry, e.g., BigQuery Table - Schema. + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1483,7 +1507,7 @@ def update_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates a AspectType resource. + r"""Updates an AspectType. .. code-block:: python @@ -1542,9 +1566,9 @@ def sample_update_aspect_type(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` Aspect Type is a template for creating Aspects, and represents the - JSON-schema for a given Entry, e.g., BigQuery Table - Schema. 
+ The result type for the operation will be :class:`google.cloud.dataplex_v1.types.AspectType` AspectType is a template for creating Aspects, and represents the + JSON-schema for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1611,7 +1635,7 @@ def delete_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a AspectType resource. + r"""Deletes an AspectType. .. code-block:: python @@ -1645,7 +1669,7 @@ def sample_delete_aspect_type(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]): - The request object. Delele AspectType Request + The request object. Delele AspectType Request. name (str): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1766,12 +1790,12 @@ def sample_list_aspect_types(): Args: request (Union[google.cloud.dataplex_v1.types.ListAspectTypesRequest, dict]): - The request object. List AspectTypes request + The request object. List AspectTypes request. parent (str): Required. The resource name of the AspectType location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1784,7 +1808,7 @@ def sample_list_aspect_types(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListAspectTypesPager: - List AspectTypes response + List AspectTypes response. Iterating over this object will yield results and resolve additional pages @@ -1854,7 +1878,7 @@ def get_aspect_type( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.AspectType: - r"""Retrieves a AspectType resource. + r"""Gets an AspectType. .. code-block:: python @@ -1884,7 +1908,7 @@ def sample_get_aspect_type(): Args: request (Union[google.cloud.dataplex_v1.types.GetAspectTypeRequest, dict]): - The request object. Get AspectType request + The request object. Get AspectType request. name (str): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1900,10 +1924,10 @@ def sample_get_aspect_type(): Returns: google.cloud.dataplex_v1.types.AspectType: - Aspect Type is a template for - creating Aspects, and represents the - JSON-schema for a given Entry, e.g., - BigQuery Table Schema. + AspectType is a template for creating + Aspects, and represents the JSON-schema + for a given Entry, for example, BigQuery + Table Schema. """ # Create or coerce a protobuf request object. @@ -1960,7 +1984,7 @@ def create_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates an EntryGroup + r"""Creates an EntryGroup. .. code-block:: python @@ -1995,7 +2019,7 @@ def sample_create_entry_group(): Args: request (Union[google.cloud.dataplex_v1.types.CreateEntryGroupRequest, dict]): - The request object. Create EntryGroup Request + The request object. Create EntryGroup Request. parent (str): Required. 
The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} @@ -2005,7 +2029,7 @@ def sample_create_entry_group(): on the ``request`` instance; if ``request`` is provided, this should not be set. entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource + Required. EntryGroup Resource. This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2095,7 +2119,7 @@ def update_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates a EntryGroup resource. + r"""Updates an EntryGroup. .. code-block:: python @@ -2128,9 +2152,9 @@ def sample_update_entry_group(): Args: request (Union[google.cloud.dataplex_v1.types.UpdateEntryGroupRequest, dict]): - The request object. Update EntryGroup Request + The request object. Update EntryGroup Request. entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource + Required. EntryGroup Resource. This corresponds to the ``entry_group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2219,7 +2243,7 @@ def delete_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Deletes a EntryGroup resource. + r"""Deletes an EntryGroup. .. code-block:: python @@ -2253,7 +2277,7 @@ def sample_delete_entry_group(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteEntryGroupRequest, dict]): - The request object. Delele EntryGroup Request + The request object. Delete EntryGroup Request. name (str): Required. The resource name of the EntryGroup: ``projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}``. @@ -2379,7 +2403,7 @@ def sample_list_entry_groups(): Required. The resource name of the entryGroup location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -2392,7 +2416,7 @@ def sample_list_entry_groups(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntryGroupsPager: - List ListEntryGroups response. + List entry groups response. Iterating over this object will yield results and resolve additional pages @@ -2462,7 +2486,7 @@ def get_entry_group( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.EntryGroup: - r"""Retrieves a EntryGroup resource. + r"""Gets an EntryGroup. .. code-block:: python @@ -2601,7 +2625,7 @@ def sample_create_entry(): Args: request (Union[google.cloud.dataplex_v1.types.CreateEntryRequest, dict]): - The request object. + The request object. Create Entry request. parent (str): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. @@ -2618,22 +2642,23 @@ def sample_create_entry(): Required. Entry identifier. It has to be unique within an Entry Group. - Entries corresponding to Google Cloud resources use - Entry ID format based on Full Resource Names - (https://cloud.google.com/apis/design/resource_names#full_resource_name). 
- The format is a Full Resource Name of the resource - without the prefix double slashes in the API Service - Name part of Full Resource Name. This allows retrieval - of entries using their associated resource name. + Entries corresponding to Google Cloud resources use an + Entry ID format based on `full resource + names `__. + The format is a full resource name of the resource + without the prefix double slashes in the API service + name part of the full resource name. This allows + retrieval of entries using their associated resource + name. - For example if the Full Resource Name of a resource is + For example, if the full resource name of a resource is ``//library.googleapis.com/shelves/shelf1/books/book2``, then the suggested entry_id is ``library.googleapis.com/shelves/shelf1/books/book2``. It is also suggested to follow the same convention for - entries corresponding to resources from other providers - or systems than Google Cloud. + entries corresponding to resources from providers or + systems other than Google Cloud. The maximum size of the field is 4000 characters. @@ -2649,7 +2674,7 @@ def sample_create_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2743,7 +2768,7 @@ def sample_update_entry(): Args: request (Union[google.cloud.dataplex_v1.types.UpdateEntryRequest, dict]): - The request object. + The request object. Update Entry request. entry (google.cloud.dataplex_v1.types.Entry): Required. Entry resource. This corresponds to the ``entry`` field @@ -2753,8 +2778,8 @@ def sample_update_entry(): Optional. Mask of fields to update. To update Aspects, the update_mask must contain the value "aspects". - If the update_mask is empty, all modifiable fields - present in the request will be updated. + If the update_mask is empty, the service will update all + modifiable fields present in the request. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2768,7 +2793,7 @@ def sample_update_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2858,7 +2883,7 @@ def sample_delete_entry(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteEntryRequest, dict]): - The request object. + The request object. Delete Entry request. name (str): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -2875,7 +2900,7 @@ def sample_delete_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -2931,7 +2956,7 @@ def list_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEntriesPager: - r"""Lists entries within an entry group. + r"""Lists Entries within an EntryGroup. .. code-block:: python @@ -2962,7 +2987,7 @@ def sample_list_entries(): Args: request (Union[google.cloud.dataplex_v1.types.ListEntriesRequest, dict]): - The request object. + The request object. List Entries request. parent (str): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. 
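# --- Editorial example (illustrative sketch, not part of the generated diff) ---
# The hunks above tighten the ``list_entries`` docstring; a minimal call using the
# flattened ``parent`` argument documented there might look like this. The project,
# location, and entry group names below are placeholders.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
parent = "projects/my-project/locations/us-central1/entryGroups/my-entry-group"

# The returned ListEntriesPager yields Entry messages and resolves further pages lazily.
for entry in client.list_entries(parent=parent):
    print(entry.name)
# -------------------------------------------------------------------------------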
@@ -2978,6 +3003,8 @@ def sample_list_entries(): Returns: google.cloud.dataplex_v1.services.catalog_service.pagers.ListEntriesPager: + List Entries response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -3046,7 +3073,12 @@ def get_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.Entry: - r"""Gets a single entry. + r"""Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. .. code-block:: python @@ -3076,7 +3108,7 @@ def sample_get_entry(): Args: request (Union[google.cloud.dataplex_v1.types.GetEntryRequest, dict]): - The request object. + The request object. Get Entry request. name (str): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -3093,7 +3125,7 @@ def sample_get_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -3148,7 +3180,13 @@ def lookup_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> catalog.Entry: - r"""Looks up a single entry. + r"""Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. .. code-block:: python @@ -3179,7 +3217,8 @@ def sample_lookup_entry(): Args: request (Union[google.cloud.dataplex_v1.types.LookupEntryRequest, dict]): - The request object. + The request object. Lookup Entry request using + permissions in the source system. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3189,7 +3228,7 @@ def sample_lookup_entry(): Returns: google.cloud.dataplex_v1.types.Entry: An entry is a representation of a - data asset which can be described by + data resource that can be described by various metadata. """ @@ -3233,7 +3272,8 @@ def search_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchEntriesPager: - r"""Searches for entries matching given query and scope. + r"""Searches for Entries matching the given query and + scope. .. code-block:: python @@ -3350,6 +3390,467 @@ def sample_search_entries(): # Done; return the response. return response + def create_metadata_job( + self, + request: Optional[Union[catalog.CreateMetadataJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + metadata_job: Optional[catalog.MetadataJob] = None, + metadata_job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateMetadataJobRequest, dict]): + The request object. Create metadata job request. + parent (str): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job (google.cloud.dataplex_v1.types.MetadataJob): + Required. The metadata job resource. + This corresponds to the ``metadata_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metadata_job_id (str): + Optional. The metadata job ID. If not provided, a unique + ID is generated with the prefix ``metadata-job-``. + + This corresponds to the ``metadata_job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataplex_v1.types.MetadataJob` A + metadata job resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metadata_job, metadata_job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateMetadataJobRequest): + request = catalog.CreateMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metadata_job is not None: + request.metadata_job = metadata_job + if metadata_job_id is not None: + request.metadata_job_id = metadata_job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + catalog.MetadataJob, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_metadata_job( + self, + request: Optional[Union[catalog.GetMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> catalog.MetadataJob: + r"""Gets a metadata job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetMetadataJobRequest, dict]): + The request object. Get metadata job request. + name (str): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.MetadataJob: + A metadata job resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetMetadataJobRequest): + request = catalog.GetMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
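# --- Editorial example (illustrative sketch, not part of the generated diff) ---
# ``create_metadata_job`` (added above) also accepts flattened arguments in place of
# a request object. This sketch mirrors the generated sample; every resource name is
# a placeholder, and ``metadata_job_id`` is optional.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()

metadata_job = dataplex_v1.MetadataJob()
metadata_job.type_ = "IMPORT"
metadata_job.import_spec.scope.entry_groups = [
    "projects/my-project/locations/us-central1/entryGroups/my-entry-group"
]
metadata_job.import_spec.scope.entry_types = [
    "projects/my-project/locations/us-central1/entryTypes/my-entry-type"
]
metadata_job.import_spec.entry_sync_mode = "INCREMENTAL"
metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL"

lro = client.create_metadata_job(
    parent="projects/my-project/locations/us-central1",
    metadata_job=metadata_job,
    metadata_job_id="metadata-job-import-1",  # optional; a prefixed ID is generated if omitted
)

# The call returns a long-running operation whose result is the MetadataJob.
job = lro.result()
print(job)
# -------------------------------------------------------------------------------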
+ rpc = self._transport._wrapped_methods[self._transport.get_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_metadata_jobs( + self, + request: Optional[Union[catalog.ListMetadataJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMetadataJobsPager: + r"""Lists metadata jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListMetadataJobsRequest, dict]): + The request object. List metadata jobs request. + parent (str): + Required. The resource name of the parent location, in + the format + ``projects/{project_id_or_number}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager: + List metadata jobs response. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.ListMetadataJobsRequest): + request = catalog.ListMetadataJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
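# --- Editorial example (illustrative sketch, not part of the generated diff) ---
# ``get_metadata_job`` likewise accepts the flattened ``name`` argument documented
# above. The metadata job resource name below is a placeholder.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
job = client.get_metadata_job(
    name="projects/my-project/locations/us-central1/metadataJobs/my-metadata-job"
)
print(job)
# -------------------------------------------------------------------------------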
+ rpc = self._transport._wrapped_methods[self._transport.list_metadata_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMetadataJobsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_metadata_job( + self, + request: Optional[Union[catalog.CancelMetadataJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_metadata_job(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.CancelMetadataJobRequest, dict]): + The request object. Cancel metadata job request. + name (str): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
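# --- Editorial example (illustrative sketch, not part of the generated diff) ---
# ``list_metadata_jobs`` returns a ListMetadataJobsPager (defined in the pagers.py
# hunk later in this diff). Iterating the pager yields MetadataJob messages, while
# its ``pages`` property yields one ListMetadataJobsResponse per page. The parent
# below is a placeholder.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
parent = "projects/my-project/locations/us-central1"

# Item-by-item iteration; additional pages are fetched automatically.
for job in client.list_metadata_jobs(parent=parent):
    print(job.name)

# Alternatively, walk the raw responses page by page.
for page in client.list_metadata_jobs(parent=parent).pages:
    print(len(page.metadata_jobs))
# -------------------------------------------------------------------------------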
+ if not isinstance(request, catalog.CancelMetadataJobRequest): + request = catalog.CancelMetadataJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_metadata_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def __enter__(self) -> "CatalogServiceClient": return self diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/pagers.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/pagers.py index 48a7fc8b34a7..27874bec60df 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/pagers.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/pagers.py @@ -799,3 +799,155 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMetadataJobsPager: + """A pager for iterating through ``list_metadata_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``metadata_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMetadataJobs`` requests and continue to iterate + through the ``metadata_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., catalog.ListMetadataJobsResponse], + request: catalog.ListMetadataJobsRequest, + response: catalog.ListMetadataJobsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
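# --- Editorial example (illustrative sketch, not part of the generated diff) ---
# ``cancel_metadata_job`` accepts the flattened ``name`` argument and returns
# nothing. As the docstring above notes, cancelling an in-progress import job can
# leave partially applied changes. The job name below is a placeholder.
from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
client.cancel_metadata_job(
    name="projects/my-project/locations/us-central1/metadataJobs/my-metadata-job"
)
# -------------------------------------------------------------------------------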
+ """ + self._method = method + self._request = catalog.ListMetadataJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[catalog.ListMetadataJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[catalog.MetadataJob]: + for page in self.pages: + yield from page.metadata_jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMetadataJobsAsyncPager: + """A pager for iterating through ``list_metadata_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``metadata_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMetadataJobs`` requests and continue to iterate + through the ``metadata_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListMetadataJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[catalog.ListMetadataJobsResponse]], + request: catalog.ListMetadataJobsRequest, + response: catalog.ListMetadataJobsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListMetadataJobsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListMetadataJobsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = catalog.ListMetadataJobsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[catalog.ListMetadataJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[catalog.MetadataJob]: + async def async_generator(): + async for page in self.pages: + for response in page.metadata_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py index 7e054f2e5a81..eb6d4b7b6619 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py @@ -27,6 +27,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.cloud.dataplex_v1 import gapic_version as package_version from google.cloud.dataplex_v1.types import catalog @@ -352,6 +353,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_metadata_job: gapic_v1.method.wrap_method( + self.create_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_job: gapic_v1.method.wrap_method( + self.get_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.list_metadata_jobs: gapic_v1.method.wrap_method( + self.list_metadata_jobs, + default_timeout=None, + client_info=client_info, + ), + self.cancel_metadata_job: gapic_v1.method.wrap_method( + self.cancel_metadata_job, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -597,6 +618,45 @@ def search_entries( ]: raise NotImplementedError() + @property + def create_metadata_job( + self, + ) -> Callable[ + [catalog.CreateMetadataJobRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_metadata_job( + self, + ) -> Callable[ + [catalog.GetMetadataJobRequest], + Union[catalog.MetadataJob, Awaitable[catalog.MetadataJob]], + ]: + raise NotImplementedError() + + @property + def list_metadata_jobs( + self, + ) -> Callable[ + [catalog.ListMetadataJobsRequest], + Union[ + catalog.ListMetadataJobsResponse, + Awaitable[catalog.ListMetadataJobsResponse], + ], + ]: + raise NotImplementedError() + + @property + def cancel_metadata_job( + self, + ) -> Callable[ + [catalog.CancelMetadataJobRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git 
a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py index 200495c0c780..5cf9af6bd86e 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py @@ -24,6 +24,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from google.cloud.dataplex_v1.types import catalog @@ -35,10 +36,10 @@ class CatalogServiceGrpcTransport(CatalogServiceTransport): """gRPC backend transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entry and Aspect which - collectively allow a data administrator to organize, manage, - secure and catalog data across their organization located across - cloud projects in a variety of storage systems including Cloud + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud Storage and BigQuery. This class defines the same methods as the primary client, so the @@ -264,7 +265,7 @@ def create_entry_type( ) -> Callable[[catalog.CreateEntryTypeRequest], operations_pb2.Operation]: r"""Return a callable for the create entry type method over gRPC. - Creates an EntryType + Creates an EntryType. Returns: Callable[[~.CreateEntryTypeRequest], @@ -290,7 +291,7 @@ def update_entry_type( ) -> Callable[[catalog.UpdateEntryTypeRequest], operations_pb2.Operation]: r"""Return a callable for the update entry type method over gRPC. - Updates a EntryType resource. + Updates an EntryType. Returns: Callable[[~.UpdateEntryTypeRequest], @@ -316,7 +317,7 @@ def delete_entry_type( ) -> Callable[[catalog.DeleteEntryTypeRequest], operations_pb2.Operation]: r"""Return a callable for the delete entry type method over gRPC. - Deletes a EntryType resource. + Deletes an EntryType. Returns: Callable[[~.DeleteEntryTypeRequest], @@ -368,7 +369,7 @@ def get_entry_type( ) -> Callable[[catalog.GetEntryTypeRequest], catalog.EntryType]: r"""Return a callable for the get entry type method over gRPC. - Retrieves a EntryType resource. + Gets an EntryType. Returns: Callable[[~.GetEntryTypeRequest], @@ -394,7 +395,7 @@ def create_aspect_type( ) -> Callable[[catalog.CreateAspectTypeRequest], operations_pb2.Operation]: r"""Return a callable for the create aspect type method over gRPC. - Creates an AspectType + Creates an AspectType. Returns: Callable[[~.CreateAspectTypeRequest], @@ -420,7 +421,7 @@ def update_aspect_type( ) -> Callable[[catalog.UpdateAspectTypeRequest], operations_pb2.Operation]: r"""Return a callable for the update aspect type method over gRPC. - Updates a AspectType resource. + Updates an AspectType. Returns: Callable[[~.UpdateAspectTypeRequest], @@ -446,7 +447,7 @@ def delete_aspect_type( ) -> Callable[[catalog.DeleteAspectTypeRequest], operations_pb2.Operation]: r"""Return a callable for the delete aspect type method over gRPC. - Deletes a AspectType resource. + Deletes an AspectType. 
Returns: Callable[[~.DeleteAspectTypeRequest], @@ -498,7 +499,7 @@ def get_aspect_type( ) -> Callable[[catalog.GetAspectTypeRequest], catalog.AspectType]: r"""Return a callable for the get aspect type method over gRPC. - Retrieves a AspectType resource. + Gets an AspectType. Returns: Callable[[~.GetAspectTypeRequest], @@ -524,7 +525,7 @@ def create_entry_group( ) -> Callable[[catalog.CreateEntryGroupRequest], operations_pb2.Operation]: r"""Return a callable for the create entry group method over gRPC. - Creates an EntryGroup + Creates an EntryGroup. Returns: Callable[[~.CreateEntryGroupRequest], @@ -550,7 +551,7 @@ def update_entry_group( ) -> Callable[[catalog.UpdateEntryGroupRequest], operations_pb2.Operation]: r"""Return a callable for the update entry group method over gRPC. - Updates a EntryGroup resource. + Updates an EntryGroup. Returns: Callable[[~.UpdateEntryGroupRequest], @@ -576,7 +577,7 @@ def delete_entry_group( ) -> Callable[[catalog.DeleteEntryGroupRequest], operations_pb2.Operation]: r"""Return a callable for the delete entry group method over gRPC. - Deletes a EntryGroup resource. + Deletes an EntryGroup. Returns: Callable[[~.DeleteEntryGroupRequest], @@ -628,7 +629,7 @@ def get_entry_group( ) -> Callable[[catalog.GetEntryGroupRequest], catalog.EntryGroup]: r"""Return a callable for the get entry group method over gRPC. - Retrieves a EntryGroup resource. + Gets an EntryGroup. Returns: Callable[[~.GetEntryGroupRequest], @@ -726,7 +727,7 @@ def list_entries( ) -> Callable[[catalog.ListEntriesRequest], catalog.ListEntriesResponse]: r"""Return a callable for the list entries method over gRPC. - Lists entries within an entry group. + Lists Entries within an EntryGroup. Returns: Callable[[~.ListEntriesRequest], @@ -750,7 +751,12 @@ def list_entries( def get_entry(self) -> Callable[[catalog.GetEntryRequest], catalog.Entry]: r"""Return a callable for the get entry method over gRPC. - Gets a single entry. + Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. Returns: Callable[[~.GetEntryRequest], @@ -774,7 +780,13 @@ def get_entry(self) -> Callable[[catalog.GetEntryRequest], catalog.Entry]: def lookup_entry(self) -> Callable[[catalog.LookupEntryRequest], catalog.Entry]: r"""Return a callable for the lookup entry method over gRPC. - Looks up a single entry. + Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. Returns: Callable[[~.LookupEntryRequest], @@ -800,7 +812,8 @@ def search_entries( ) -> Callable[[catalog.SearchEntriesRequest], catalog.SearchEntriesResponse]: r"""Return a callable for the search entries method over gRPC. - Searches for entries matching given query and scope. + Searches for Entries matching the given query and + scope. Returns: Callable[[~.SearchEntriesRequest], @@ -820,6 +833,118 @@ def search_entries( ) return self._stubs["search_entries"] + @property + def create_metadata_job( + self, + ) -> Callable[[catalog.CreateMetadataJobRequest], operations_pb2.Operation]: + r"""Return a callable for the create metadata job method over gRPC. + + Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. 
+ + Returns: + Callable[[~.CreateMetadataJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_metadata_job" not in self._stubs: + self._stubs["create_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob", + request_serializer=catalog.CreateMetadataJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_metadata_job"] + + @property + def get_metadata_job( + self, + ) -> Callable[[catalog.GetMetadataJobRequest], catalog.MetadataJob]: + r"""Return a callable for the get metadata job method over gRPC. + + Gets a metadata job. + + Returns: + Callable[[~.GetMetadataJobRequest], + ~.MetadataJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_metadata_job" not in self._stubs: + self._stubs["get_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/GetMetadataJob", + request_serializer=catalog.GetMetadataJobRequest.serialize, + response_deserializer=catalog.MetadataJob.deserialize, + ) + return self._stubs["get_metadata_job"] + + @property + def list_metadata_jobs( + self, + ) -> Callable[[catalog.ListMetadataJobsRequest], catalog.ListMetadataJobsResponse]: + r"""Return a callable for the list metadata jobs method over gRPC. + + Lists metadata jobs. + + Returns: + Callable[[~.ListMetadataJobsRequest], + ~.ListMetadataJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_metadata_jobs" not in self._stubs: + self._stubs["list_metadata_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs", + request_serializer=catalog.ListMetadataJobsRequest.serialize, + response_deserializer=catalog.ListMetadataJobsResponse.deserialize, + ) + return self._stubs["list_metadata_jobs"] + + @property + def cancel_metadata_job( + self, + ) -> Callable[[catalog.CancelMetadataJobRequest], empty_pb2.Empty]: + r"""Return a callable for the cancel metadata job method over gRPC. + + Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + Returns: + Callable[[~.CancelMetadataJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_metadata_job" not in self._stubs: + self._stubs["cancel_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob", + request_serializer=catalog.CancelMetadataJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_metadata_job"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py index 1e62bdf0763f..6ff45ba84779 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py @@ -26,6 +26,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -39,10 +40,10 @@ class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport): """gRPC AsyncIO backend transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, Entry and Aspect which - collectively allow a data administrator to organize, manage, - secure and catalog data across their organization located across - cloud projects in a variety of storage systems including Cloud + EntryGroups, EntryTypes, AspectTypes, and Entries. They + collectively let data administrators organize, manage, secure, + and catalog data located across cloud projects in their + organization in a variety of storage systems, including Cloud Storage and BigQuery. This class defines the same methods as the primary client, so the @@ -276,7 +277,7 @@ def create_entry_type( ]: r"""Return a callable for the create entry type method over gRPC. - Creates an EntryType + Creates an EntryType. Returns: Callable[[~.CreateEntryTypeRequest], @@ -304,7 +305,7 @@ def update_entry_type( ]: r"""Return a callable for the update entry type method over gRPC. - Updates a EntryType resource. + Updates an EntryType. Returns: Callable[[~.UpdateEntryTypeRequest], @@ -332,7 +333,7 @@ def delete_entry_type( ]: r"""Return a callable for the delete entry type method over gRPC. - Deletes a EntryType resource. + Deletes an EntryType. Returns: Callable[[~.DeleteEntryTypeRequest], @@ -386,7 +387,7 @@ def get_entry_type( ) -> Callable[[catalog.GetEntryTypeRequest], Awaitable[catalog.EntryType]]: r"""Return a callable for the get entry type method over gRPC. - Retrieves a EntryType resource. + Gets an EntryType. Returns: Callable[[~.GetEntryTypeRequest], @@ -414,7 +415,7 @@ def create_aspect_type( ]: r"""Return a callable for the create aspect type method over gRPC. - Creates an AspectType + Creates an AspectType. Returns: Callable[[~.CreateAspectTypeRequest], @@ -442,7 +443,7 @@ def update_aspect_type( ]: r"""Return a callable for the update aspect type method over gRPC. - Updates a AspectType resource. + Updates an AspectType. Returns: Callable[[~.UpdateAspectTypeRequest], @@ -470,7 +471,7 @@ def delete_aspect_type( ]: r"""Return a callable for the delete aspect type method over gRPC. - Deletes a AspectType resource. + Deletes an AspectType. 
Returns: Callable[[~.DeleteAspectTypeRequest], @@ -524,7 +525,7 @@ def get_aspect_type( ) -> Callable[[catalog.GetAspectTypeRequest], Awaitable[catalog.AspectType]]: r"""Return a callable for the get aspect type method over gRPC. - Retrieves a AspectType resource. + Gets an AspectType. Returns: Callable[[~.GetAspectTypeRequest], @@ -552,7 +553,7 @@ def create_entry_group( ]: r"""Return a callable for the create entry group method over gRPC. - Creates an EntryGroup + Creates an EntryGroup. Returns: Callable[[~.CreateEntryGroupRequest], @@ -580,7 +581,7 @@ def update_entry_group( ]: r"""Return a callable for the update entry group method over gRPC. - Updates a EntryGroup resource. + Updates an EntryGroup. Returns: Callable[[~.UpdateEntryGroupRequest], @@ -608,7 +609,7 @@ def delete_entry_group( ]: r"""Return a callable for the delete entry group method over gRPC. - Deletes a EntryGroup resource. + Deletes an EntryGroup. Returns: Callable[[~.DeleteEntryGroupRequest], @@ -662,7 +663,7 @@ def get_entry_group( ) -> Callable[[catalog.GetEntryGroupRequest], Awaitable[catalog.EntryGroup]]: r"""Return a callable for the get entry group method over gRPC. - Retrieves a EntryGroup resource. + Gets an EntryGroup. Returns: Callable[[~.GetEntryGroupRequest], @@ -766,7 +767,7 @@ def list_entries( ) -> Callable[[catalog.ListEntriesRequest], Awaitable[catalog.ListEntriesResponse]]: r"""Return a callable for the list entries method over gRPC. - Lists entries within an entry group. + Lists Entries within an EntryGroup. Returns: Callable[[~.ListEntriesRequest], @@ -792,7 +793,12 @@ def get_entry( ) -> Callable[[catalog.GetEntryRequest], Awaitable[catalog.Entry]]: r"""Return a callable for the get entry method over gRPC. - Gets a single entry. + Gets an Entry. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. Returns: Callable[[~.GetEntryRequest], @@ -818,7 +824,13 @@ def lookup_entry( ) -> Callable[[catalog.LookupEntryRequest], Awaitable[catalog.Entry]]: r"""Return a callable for the lookup entry method over gRPC. - Looks up a single entry. + Looks up a single Entry by name using the permission on the + source system. + + **Caution**: The BigQuery metadata that is stored in Dataplex + Catalog is changing. For more information, see `Changes to + BigQuery metadata stored in Dataplex + Catalog `__. Returns: Callable[[~.LookupEntryRequest], @@ -846,7 +858,8 @@ def search_entries( ]: r"""Return a callable for the search entries method over gRPC. - Searches for entries matching given query and scope. + Searches for Entries matching the given query and + scope. Returns: Callable[[~.SearchEntriesRequest], @@ -866,6 +879,122 @@ def search_entries( ) return self._stubs["search_entries"] + @property + def create_metadata_job( + self, + ) -> Callable[ + [catalog.CreateMetadataJobRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create metadata job method over gRPC. + + Creates a metadata job. For example, use a metadata + job to import Dataplex Catalog entries and aspects from + a third-party system into Dataplex. + + Returns: + Callable[[~.CreateMetadataJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_metadata_job" not in self._stubs: + self._stubs["create_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CreateMetadataJob", + request_serializer=catalog.CreateMetadataJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_metadata_job"] + + @property + def get_metadata_job( + self, + ) -> Callable[[catalog.GetMetadataJobRequest], Awaitable[catalog.MetadataJob]]: + r"""Return a callable for the get metadata job method over gRPC. + + Gets a metadata job. + + Returns: + Callable[[~.GetMetadataJobRequest], + Awaitable[~.MetadataJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_metadata_job" not in self._stubs: + self._stubs["get_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/GetMetadataJob", + request_serializer=catalog.GetMetadataJobRequest.serialize, + response_deserializer=catalog.MetadataJob.deserialize, + ) + return self._stubs["get_metadata_job"] + + @property + def list_metadata_jobs( + self, + ) -> Callable[ + [catalog.ListMetadataJobsRequest], Awaitable[catalog.ListMetadataJobsResponse] + ]: + r"""Return a callable for the list metadata jobs method over gRPC. + + Lists metadata jobs. + + Returns: + Callable[[~.ListMetadataJobsRequest], + Awaitable[~.ListMetadataJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_metadata_jobs" not in self._stubs: + self._stubs["list_metadata_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/ListMetadataJobs", + request_serializer=catalog.ListMetadataJobsRequest.serialize, + response_deserializer=catalog.ListMetadataJobsResponse.deserialize, + ) + return self._stubs["list_metadata_jobs"] + + @property + def cancel_metadata_job( + self, + ) -> Callable[[catalog.CancelMetadataJobRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel metadata job method over gRPC. + + Cancels a metadata job. + + If you cancel a metadata import job that is in progress, + the changes in the job might be partially applied. We + recommend that you reset the state of the entry groups + in your project by running another metadata job that + reverts the changes from the canceled job. + + Returns: + Callable[[~.CancelMetadataJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_metadata_job" not in self._stubs: + self._stubs["cancel_metadata_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CancelMetadataJob", + request_serializer=catalog.CancelMetadataJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_metadata_job"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1089,6 +1218,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_metadata_job: self._wrap_method( + self.create_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata_job: self._wrap_method( + self.get_metadata_job, + default_timeout=None, + client_info=client_info, + ), + self.list_metadata_jobs: self._wrap_method( + self.list_metadata_jobs, + default_timeout=None, + client_info=client_info, + ), + self.cancel_metadata_job: self._wrap_method( + self.cancel_metadata_job, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index 6ff314334644..c8c5c1eec991 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -54,6 +54,7 @@ from google.cloud.dataplex_v1.services.data_scan_service import pagers from google.cloud.dataplex_v1.types import ( + data_discovery, data_profile, data_quality, datascans, @@ -83,12 +84,16 @@ class DataScanServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = DataScanServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = DataScanServiceClient._DEFAULT_UNIVERSE + connection_path = staticmethod(DataScanServiceClient.connection_path) + parse_connection_path = staticmethod(DataScanServiceClient.parse_connection_path) data_scan_path = staticmethod(DataScanServiceClient.data_scan_path) parse_data_scan_path = staticmethod(DataScanServiceClient.parse_data_scan_path) data_scan_job_path = staticmethod(DataScanServiceClient.data_scan_job_path) parse_data_scan_job_path = staticmethod( DataScanServiceClient.parse_data_scan_job_path ) + dataset_path = staticmethod(DataScanServiceClient.dataset_path) + parse_dataset_path = staticmethod(DataScanServiceClient.parse_dataset_path) entity_path = staticmethod(DataScanServiceClient.entity_path) parse_entity_path = staticmethod(DataScanServiceClient.parse_entity_path) common_billing_account_path = staticmethod( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index 03f256f07fdc..262e38518d07 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -60,6 +60,7 @@ from google.cloud.dataplex_v1.services.data_scan_service import pagers from google.cloud.dataplex_v1.types import ( + data_discovery, data_profile, data_quality, datascans, @@ -202,6 +203,30 @@ def transport(self) -> 
DataScanServiceTransport: """ return self._transport + @staticmethod + def connection_path( + project: str, + location: str, + connection: str, + ) -> str: + """Returns a fully-qualified connection string.""" + return ( + "projects/{project}/locations/{location}/connections/{connection}".format( + project=project, + location=location, + connection=connection, + ) + ) + + @staticmethod + def parse_connection_path(path: str) -> Dict[str, str]: + """Parses a connection path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def data_scan_path( project: str, @@ -248,6 +273,23 @@ def parse_data_scan_job_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def dataset_path( + project: str, + dataset: str, + ) -> str: + """Returns a fully-qualified dataset string.""" + return "projects/{project}/datasets/{dataset}".format( + project=project, + dataset=dataset, + ) + + @staticmethod + def parse_dataset_path(path: str) -> Dict[str, str]: + """Parses a dataset path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def entity_path( project: str, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py index 4830d58af947..4c56742ac228 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py @@ -18,10 +18,12 @@ Aspect, AspectSource, AspectType, + CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, CreateEntryRequest, CreateEntryTypeRequest, + CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, DeleteEntryRequest, @@ -35,6 +37,8 @@ GetEntryGroupRequest, GetEntryRequest, GetEntryTypeRequest, + GetMetadataJobRequest, + ImportItem, ListAspectTypesRequest, ListAspectTypesResponse, ListEntriesRequest, @@ -43,7 +47,10 @@ ListEntryGroupsResponse, ListEntryTypesRequest, ListEntryTypesResponse, + ListMetadataJobsRequest, + ListMetadataJobsResponse, LookupEntryRequest, + MetadataJob, SearchEntriesRequest, SearchEntriesResponse, SearchEntriesResult, @@ -61,6 +68,7 @@ ListContentResponse, UpdateContentRequest, ) +from .data_discovery import DataDiscoveryResult, DataDiscoverySpec from .data_profile import DataProfileResult, DataProfileSpec from .data_quality import ( DataQualityColumnResult, @@ -196,10 +204,12 @@ "Aspect", "AspectSource", "AspectType", + "CancelMetadataJobRequest", "CreateAspectTypeRequest", "CreateEntryGroupRequest", "CreateEntryRequest", "CreateEntryTypeRequest", + "CreateMetadataJobRequest", "DeleteAspectTypeRequest", "DeleteEntryGroupRequest", "DeleteEntryRequest", @@ -212,6 +222,8 @@ "GetEntryGroupRequest", "GetEntryRequest", "GetEntryTypeRequest", + "GetMetadataJobRequest", + "ImportItem", "ListAspectTypesRequest", "ListAspectTypesResponse", "ListEntriesRequest", @@ -220,7 +232,10 @@ "ListEntryGroupsResponse", "ListEntryTypesRequest", "ListEntryTypesResponse", + "ListMetadataJobsRequest", + "ListMetadataJobsResponse", "LookupEntryRequest", + "MetadataJob", "SearchEntriesRequest", "SearchEntriesResponse", "SearchEntriesResult", @@ -236,6 +251,8 @@ "ListContentRequest", "ListContentResponse", "UpdateContentRequest", + "DataDiscoveryResult", + "DataDiscoverySpec", "DataProfileResult", 
"DataProfileSpec", "DataQualityColumnResult", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index 6cfe3ecbfbb5..5cfc8ed82adc 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -62,6 +62,13 @@ "SearchEntriesRequest", "SearchEntriesResult", "SearchEntriesResponse", + "ImportItem", + "CreateMetadataJobRequest", + "GetMetadataJobRequest", + "ListMetadataJobsRequest", + "ListMetadataJobsResponse", + "CancelMetadataJobRequest", + "MetadataJob", }, ) @@ -80,12 +87,11 @@ class EntryView(proto.Enum): keys of all non-required aspects. CUSTOM (3): Returns aspects matching custom fields in - GetEntryRequest. If the number of aspects would - exceed 100, the first 100 will be returned. + GetEntryRequest. If the number of aspects + exceeds 100, the first 100 will be returned. ALL (4): Returns all aspects. If the number of aspects - would exceed 100, the first 100 will be - returned. + exceeds 100, the first 100 will be returned. """ ENTRY_VIEW_UNSPECIFIED = 0 BASIC = 1 @@ -119,9 +125,9 @@ class TransferStatus(proto.Enum): class AspectType(proto.Message): - r"""Aspect Type is a template for creating Aspects, and - represents the JSON-schema for a given Entry, e.g., BigQuery - Table Schema. + r"""AspectType is a template for creating Aspects, and represents + the JSON-schema for a given Entry, for example, BigQuery Table + Schema. Attributes: name (str): @@ -130,9 +136,9 @@ class AspectType(proto.Message): projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}. uid (str): Output only. System generated globally unique - ID for the AspectType. This ID will be different - if the AspectType is deleted and re-created with - the same name. + ID for the AspectType. If you delete and + recreate the AspectType with the same name, then + this ID will be different. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the AspectType was created. @@ -147,12 +153,12 @@ class AspectType(proto.Message): Optional. User-defined labels for the AspectType. etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. + The service computes this checksum. The + client may send it on update and delete requests + to ensure it has an up-to-date value before + proceeding. authorization (google.cloud.dataplex_v1.types.AspectType.Authorization): - Immutable. Authorization defined for this + Immutable. Defines the Authorization for this type. metadata_template (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): Required. MetadataTemplate of the aspect. @@ -163,13 +169,13 @@ class AspectType(proto.Message): """ class Authorization(proto.Message): - r"""Autorization for an Aspect Type. + r"""Autorization for an AspectType. Attributes: alternate_use_permission (str): Immutable. The IAM permission grantable on - the Entry Group to allow access to instantiate - Aspects of Dataplex owned Aspect Types, only + the EntryGroup to allow access to instantiate + Aspects of Dataplex owned AspectTypes, only settable for Dataplex owned Types. 
""" @@ -179,7 +185,7 @@ class Authorization(proto.Message): ) class MetadataTemplate(proto.Message): - r"""MetadataTemplate definition for AspectType + r"""MetadataTemplate definition for an AspectType. Attributes: index (int): @@ -196,48 +202,59 @@ class MetadataTemplate(proto.Message): name (str): Required. The name of the field. type_ (str): - Required. The datatype of this field. The - following values are supported: Primitive types - (string, integer, boolean, double, datetime); - datetime must be of the format RFC3339 UTC - "Zulu" (Examples: - - "2014-10-02T15:01:23Z" and - "2014-10-02T15:01:23.045123456Z"). Complex types - (enum, array, map, record). + Required. The datatype of this field. The following values + are supported: + + Primitive types: + + - string + - integer + - boolean + - double + - datetime. Must be of the format RFC3339 UTC "Zulu" + (Examples: "2014-10-02T15:01:23Z" and + "2014-10-02T15:01:23.045123456Z"). + + Complex types: + + - enum + - array + - map + - record record_fields (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate]): - Optional. Field definition, needs to be - specified if the type is record. Defines the - nested fields. + Optional. Field definition. You must specify + it if the type is record. It defines the nested + fields. enum_values (MutableSequence[google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.EnumValue]): Optional. The list of values for an enum - type. Needs to be defined if the type is enum. + type. You must define it if the type is enum. map_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): - Optional. map_items needs to be set if the type is map. - map_items can refer to a primitive field or a complex - (record only) field. To specify a primitive field, just name - and type needs to be set in the nested MetadataTemplate. The - recommended value for the name field is item, as this is not - used in the actual payload. + Optional. If the type is map, set map_items. map_items can + refer to a primitive field or a complex (record only) field. + To specify a primitive field, you only need to set name and + type in the nested MetadataTemplate. The recommended value + for the name field is item, as this isn't used in the actual + payload. array_items (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate): - Optional. array_items needs to be set if the type is array. - array_items can refer to a primitive field or a complex - (record only) field. To specify a primitive field, just name - and type needs to be set in the nested MetadataTemplate. The - recommended value for the name field is item, as this is not + Optional. If the type is array, set array_items. array_items + can refer to a primitive field or a complex (record only) + field. To specify a primitive field, you only need to set + name and type in the nested MetadataTemplate. The + recommended value for the name field is item, as this isn't used in the actual payload. type_id (str): - Optional. Id can be used if this definition - of the field needs to be reused later. Id needs - to be unique across the entire template. Id can - only be specified if the field type is record. + Optional. You can use type id if this + definition of the field needs to be reused + later. The type id must be unique across the + entire template. You can only specify it if the + field type is record. type_ref (str): Optional. A reference to another field - definition (instead of an inline definition). 
- The value must be equal to the value of an id - field defined elsewhere in the MetadataTemplate. - Only fields with type as record can refer to - other fields. + definition (not an inline definition). The value + must be equal to the value of an id field + defined elsewhere in the MetadataTemplate. Only + fields with record type can refer to other + fields. constraints (google.cloud.dataplex_v1.types.AspectType.MetadataTemplate.Constraints): Optional. Specifies the constraints on this field. @@ -247,18 +264,18 @@ class MetadataTemplate(proto.Message): """ class EnumValue(proto.Message): - r"""Definition of Enumvalue (to be used by enum fields) + r"""Definition of Enumvalue, to be used for enum fields. Attributes: index (int): - Required. Index for the enum. Cannot be - modified. + Required. Index for the enum value. It can't + be modified. name (str): Required. Name of the enumvalue. This is the - actual value that the aspect will contain. + actual value that the aspect can contain. deprecated (str): - Optional. Optional deprecation message to be - set if an enum value needs to be deprecated. + Optional. You can set this message if you + need to deprecate an enum value. """ index: int = proto.Field( @@ -275,12 +292,12 @@ class EnumValue(proto.Message): ) class Constraints(proto.Message): - r"""Definition of the constraints of a field + r"""Definition of the constraints of a field. Attributes: required (bool): - Optional. Marks this as an optional/required - field. + Optional. Marks this field as optional or + required. """ required: bool = proto.Field( @@ -289,33 +306,32 @@ class Constraints(proto.Message): ) class Annotations(proto.Message): - r"""Definition of the annotations of a field + r"""Definition of the annotations of a field. Attributes: deprecated (str): - Optional. Marks a field as deprecated, a - deprecation message can be included. + Optional. Marks a field as deprecated. You + can include a deprecation message. display_name (str): - Optional. Specify a displayname for a field. + Optional. Display name for a field. description (str): - Optional. Specify a description for a field + Optional. Description for a field. display_order (int): - Optional. Specify a display order for a - field. Display order can be used to reorder - where a field is rendered + Optional. Display order for a field. You can + use this to reorder where a field is rendered. string_type (str): - Optional. String Type annotations can be used - to specify special meaning to string fields. The - following values are supported: richText: - - The field must be interpreted as a rich text - field. url: A fully qualified url link. - resource: A service qualified resource - reference. + Optional. You can use String Type annotations to specify + special meaning to string fields. The following values are + supported: + + - richText: The field must be interpreted as a rich text + field. + - url: A fully qualified URL link. + - resource: A service qualified resource reference. string_values (MutableSequence[str]): Optional. Suggested hints for string fields. - These can be used to suggest values to users, - through an UI for example. + You can use them to suggest values to users + through console. """ deprecated: str = proto.Field( @@ -457,13 +473,13 @@ class EntryGroup(proto.Message): Attributes: name (str): Output only. The relative resource name of the EntryGroup, - of the form: - projects/{project_number}/locations/{location_id}/entryGroups/{entry_group_id}. 
+ in the format + projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}. uid (str): Output only. System generated globally unique - ID for the EntryGroup. This ID will be different - if the EntryGroup is deleted and re-created with - the same name. + ID for the EntryGroup. If you delete and + recreate the EntryGroup with the same name, this + ID will be different. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the EntryGroup was created. @@ -478,10 +494,10 @@ class EntryGroup(proto.Message): Optional. User-defined labels for the EntryGroup. etag (str): - This checksum is computed by the server based - on the value of other fields, and may be sent on - update and delete requests to ensure the client - has an up-to-date value before proceeding. + This checksum is computed by the service, and + might be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. transfer_status (google.cloud.dataplex_v1.types.TransferStatus): Output only. Denotes the transfer status of the Entry Group. It is unspecified for Entry @@ -558,13 +574,13 @@ class EntryType(proto.Message): EntryType. etag (str): Optional. This checksum is computed by the - server based on the value of other fields, and - may be sent on update and delete requests to - ensure the client has an up-to-date value before - proceeding. + service, and might be sent on update and delete + requests to ensure the client has an up-to-date + value before proceeding. type_aliases (MutableSequence[str]): - Optional. Indicates the class this Entry Type - belongs to, for example, TABLE, DATABASE, MODEL. + Optional. Indicates the classes this Entry + Type belongs to, for example, TABLE, DATABASE, + MODEL. platform (str): Optional. The platform that Entries of this type belongs to. @@ -684,11 +700,12 @@ class Aspect(proto.Message): Output only. The time when the Aspect was last updated. data (google.protobuf.struct_pb2.Struct): - Required. The content of the aspect, according to its aspect - type schema. This will replace ``content``. The maximum size - of the field is 120KB (encoded as UTF-8). + Required. The content of the aspect, + according to its aspect type schema. The maximum + size of the field is 120KB (encoded as UTF-8). aspect_source (google.cloud.dataplex_v1.types.AspectSource): - + Optional. Information related to the source + system of the aspect. """ aspect_type: str = proto.Field( @@ -722,16 +739,21 @@ class Aspect(proto.Message): class AspectSource(proto.Message): - r"""AspectSource contains source system related information for - the aspect. + r"""Information related to the source system of the aspect. Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): - The create time of the aspect in the source + The time the aspect was created in the source system. update_time (google.protobuf.timestamp_pb2.Timestamp): - The update time of the aspect in the source - system. + The time the aspect was last updated in the + source system. + data_version (str): + The version of the data format used to + produce this data. This field is used to + indicated when the underlying data format + changes (e.g., schema modifications, changes to + the source URL format definition, etc). 
""" create_time: timestamp_pb2.Timestamp = proto.Field( @@ -744,48 +766,52 @@ class AspectSource(proto.Message): number=11, message=timestamp_pb2.Timestamp, ) + data_version: str = proto.Field( + proto.STRING, + number=12, + ) class Entry(proto.Message): - r"""An entry is a representation of a data asset which can be + r"""An entry is a representation of a data resource that can be described by various metadata. Attributes: name (str): - Identifier. The relative resource name of the Entry, of the - form: - projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}. + Identifier. The relative resource name of the entry, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. entry_type (str): - Required. Immutable. The resource name of the - EntryType used to create this Entry. + Required. Immutable. The relative resource name of the entry + type that was used to create this entry, in the format + ``projects/{project_id_or_number}/locations/{location_id}/entryTypes/{entry_type_id}``. create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Entry was - created. + Output only. The time when the entry was + created in Dataplex. update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the Entry was last - updated. + Output only. The time when the entry was last + updated in Dataplex. aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): - Optional. The Aspects attached to the Entry. - The format for the key can be one of the - following: - - 1. {projectId}.{locationId}.{aspectTypeId} (if - the aspect is attached directly to the - entry) - 2. - {projectId}.{locationId}.{aspectTypeId}@{path} - (if the aspect is attached to an entry's - path) + Optional. The aspects that are attached to the entry. + Depending on how the aspect is attached to the entry, the + format of the aspect key can be one of the following: + + - If the aspect is attached directly to the entry: + ``{project_id_or_number}.{location_id}.{aspect_type_id}`` + - If the aspect is attached to an entry's path: + ``{project_id_or_number}.{location_id}.{aspect_type_id}@{path}`` parent_entry (str): Optional. Immutable. The resource name of the parent entry. fully_qualified_name (str): - Optional. A name for the entry that can - reference it in an external system. The maximum - size of the field is 4000 characters. + Optional. A name for the entry that can be referenced by an + external system. For more information, see `Fully qualified + names `__. + The maximum size of the field is 4000 characters. entry_source (google.cloud.dataplex_v1.types.EntrySource): - Optional. Source system related information - for an entry. + Optional. Information related to the source + system of the data resource that is represented + by the entry. """ name: str = proto.Field( @@ -828,52 +854,55 @@ class Entry(proto.Message): class EntrySource(proto.Message): - r"""EntrySource contains source system related information for - the entry. + r"""Information related to the source system of the data resource + that is represented by the entry. Attributes: resource (str): The name of the resource in the source - system. The maximum size of the field is 4000 - characters. + system. Maximum length is 4,000 characters. system (str): The name of the source system. - The maximum size of the field is 64 characters. + Maximum length is 64 characters. 
platform (str): The platform containing the source system. - The maximum size of the field is 64 characters. + Maximum length is 64 characters. display_name (str): - User friendly display name. - The maximum size of the field is 500 characters. + A user-friendly display name. + Maximum length is 500 characters. description (str): - Description of the Entry. - The maximum size of the field is 2000 - characters. + A description of the data resource. + Maximum length is 2,000 characters. labels (MutableMapping[str, str]): User-defined labels. The maximum size of keys and values is 128 characters each. ancestors (MutableSequence[google.cloud.dataplex_v1.types.EntrySource.Ancestor]): - Immutable. The ancestors of the Entry in the - source system. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The create time of the resource in the source + Immutable. The entries representing the + ancestors of the data resource in the source system. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the resource was created in the + source system. update_time (google.protobuf.timestamp_pb2.Timestamp): - The update time of the resource in the source - system. + The time when the resource was last updated in the source + system. If the entry exists in the system and its + ``EntrySource`` has ``update_time`` populated, further + updates to the ``EntrySource`` of the entry must provide + incremental updates to its ``update_time``. location (str): Output only. Location of the resource in the - source system. Entry will be searchable by this + source system. You can search the entry by this location. By default, this should match the - location of the EntryGroup containing this - entry. A different value allows capturing source - location for data external to GCP. + location of the entry group containing this + entry. A different value allows capturing the + source location for data external to Google + Cloud. """ class Ancestor(proto.Message): - r"""Ancestor contains information about individual items in the - hierarchy of an Entry. + r"""Information about individual items in the hierarchy that is + associated with the data resource. Attributes: name (str): @@ -938,7 +967,7 @@ class Ancestor(proto.Message): class CreateEntryGroupRequest(proto.Message): - r"""Create EntryGroup Request + r"""Create EntryGroup Request. Attributes: parent (str): @@ -948,10 +977,11 @@ class CreateEntryGroupRequest(proto.Message): entry_group_id (str): Required. EntryGroup identifier. entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource + Required. EntryGroup Resource. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. + Optional. The service validates the request + without performing any mutations. The default is + false. """ parent: str = proto.Field( @@ -974,16 +1004,17 @@ class CreateEntryGroupRequest(proto.Message): class UpdateEntryGroupRequest(proto.Message): - r"""Update EntryGroup Request + r"""Update EntryGroup Request. Attributes: entry_group (google.cloud.dataplex_v1.types.EntryGroup): - Required. EntryGroup Resource + Required. EntryGroup Resource. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. + Optional. The service validates the request, + without performing any mutations. The default is + false. 
""" entry_group: "EntryGroup" = proto.Field( @@ -1003,7 +1034,7 @@ class UpdateEntryGroupRequest(proto.Message): class DeleteEntryGroupRequest(proto.Message): - r"""Delele EntryGroup Request + r"""Delete EntryGroup Request. Attributes: name (str): @@ -1013,7 +1044,7 @@ class DeleteEntryGroupRequest(proto.Message): Optional. If the client provided etag value does not match the current etag value, the DeleteEntryGroupRequest method returns an - ABORTED error response + ABORTED error response. """ name: str = proto.Field( @@ -1034,18 +1065,18 @@ class ListEntryGroupsRequest(proto.Message): Required. The resource name of the entryGroup location, of the form: ``projects/{project_number}/locations/{location_id}`` where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of EntryGroups to return. The service may return fewer than this - value. If unspecified, at most 10 EntryGroups - will be returned. The maximum value is 1000; + value. If unspecified, the service returns at + most 10 EntryGroups. The maximum value is 1000; values above 1000 will be coerced to 1000. page_token (str): Optional. Page token received from a previous ``ListEntryGroups`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListEntryGroups`` must match the call that + subsequent page. When paginating, all other parameters you + provide to ``ListEntryGroups`` must match the call that provided the page token. filter (str): Optional. Filter request. @@ -1076,18 +1107,17 @@ class ListEntryGroupsRequest(proto.Message): class ListEntryGroupsResponse(proto.Message): - r"""List ListEntryGroups response. + r"""List entry groups response. Attributes: entry_groups (MutableSequence[google.cloud.dataplex_v1.types.EntryGroup]): - ListEntryGroups under the given parent - location. + Entry groups under the given parent location. next_page_token (str): Token to retrieve the next page of results, or empty if there are no more results in the list. unreachable_locations (MutableSequence[str]): - Locations that could not be reached. + Locations that the service couldn't reach. """ @property @@ -1125,20 +1155,21 @@ class GetEntryGroupRequest(proto.Message): class CreateEntryTypeRequest(proto.Message): - r"""Create EntryType Request + r"""Create EntryType Request. Attributes: parent (str): Required. The resource name of the EntryType, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. entry_type_id (str): Required. EntryType identifier. entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource + Required. EntryType Resource. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. + Optional. The service validates the request + without performing any mutations. The default is + false. """ parent: str = proto.Field( @@ -1161,16 +1192,17 @@ class CreateEntryTypeRequest(proto.Message): class UpdateEntryTypeRequest(proto.Message): - r"""Update EntryType Request + r"""Update EntryType Request. Attributes: entry_type (google.cloud.dataplex_v1.types.EntryType): - Required. EntryType Resource + Required. EntryType Resource. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. 
The default is false. + Optional. The service validates the request + without performing any mutations. The default is + false. """ entry_type: "EntryType" = proto.Field( @@ -1190,7 +1222,7 @@ class UpdateEntryTypeRequest(proto.Message): class DeleteEntryTypeRequest(proto.Message): - r"""Delele EntryType Request + r"""Delete EntryType Request. Attributes: name (str): @@ -1200,7 +1232,7 @@ class DeleteEntryTypeRequest(proto.Message): Optional. If the client provided etag value does not match the current etag value, the DeleteEntryTypeRequest method returns an ABORTED - error response + error response. """ name: str = proto.Field( @@ -1221,32 +1253,32 @@ class ListEntryTypesRequest(proto.Message): Required. The resource name of the EntryType location, of the form: ``projects/{project_number}/locations/{location_id}`` where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of EntryTypes to return. The service may return fewer than this - value. If unspecified, at most 10 EntryTypes - will be returned. The maximum value is 1000; + value. If unspecified, the service returns at + most 10 EntryTypes. The maximum value is 1000; values above 1000 will be coerced to 1000. page_token (str): Optional. Page token received from a previous ``ListEntryTypes`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters + subsequent page. When paginating, all other parameters you provided to ``ListEntryTypes`` must match the call that provided the page token. filter (str): - Optional. Filter request. Filters are - case-sensitive. The following formats are - supported: - - labels.key1 = "value1" - labels:key1 - name = "value" - These restrictions can be coinjoined with AND, - OR and NOT conjunctions. + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - labels.key1 = "value1" + - labels:key1 + - name = "value" + + These restrictions can be conjoined with AND, OR, and NOT + conjunctions. order_by (str): - Optional. Order by fields (``name`` or ``create_time``) for - the result. If not specified, the ordering is undefined. + Optional. Orders the result by ``name`` or ``create_time`` + fields. If not specified, the ordering is undefined. """ parent: str = proto.Field( @@ -1272,18 +1304,17 @@ class ListEntryTypesRequest(proto.Message): class ListEntryTypesResponse(proto.Message): - r"""List EntryTypes response + r"""List EntryTypes response. Attributes: entry_types (MutableSequence[google.cloud.dataplex_v1.types.EntryType]): - ListEntryTypes under the given parent - location. + EntryTypes under the given parent location. next_page_token (str): Token to retrieve the next page of results, or empty if there are no more results in the list. unreachable_locations (MutableSequence[str]): - Locations that could not be reached. + Locations that the service couldn't reach. """ @property def raw_page(self): @@ -1306,7 +1337,7 @@ def raw_page(self): class GetEntryTypeRequest(proto.Message): - r"""Get EntryType request + r"""Get EntryType request. Attributes: name (str): @@ -1321,20 +1352,21 @@ class GetEntryTypeRequest(proto.Message): class CreateAspectTypeRequest(proto.Message): - r"""Create AspectType Request + r"""Create AspectType Request. Attributes: parent (str): Required. The resource name of the AspectType, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region.
aspect_type_id (str): Required. AspectType identifier. aspect_type (google.cloud.dataplex_v1.types.AspectType): - Required. AspectType Resource + Required. AspectType Resource. validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is false. + Optional. The service validates the request + without performing any mutations. The default is + false. """ parent: str = proto.Field( @@ -1386,7 +1418,7 @@ class UpdateAspectTypeRequest(proto.Message): class DeleteAspectTypeRequest(proto.Message): - r"""Delele AspectType Request + r"""Delete AspectType Request. Attributes: name (str): @@ -1396,7 +1428,7 @@ class DeleteAspectTypeRequest(proto.Message): Optional. If the client provided etag value does not match the current etag value, the DeleteAspectTypeRequest method returns an - ABORTED error response + ABORTED error response. """ name: str = proto.Field( @@ -1410,39 +1442,39 @@ class DeleteAspectTypeRequest(proto.Message): class ListAspectTypesRequest(proto.Message): - r"""List AspectTypes request + r"""List AspectTypes request. Attributes: parent (str): Required. The resource name of the AspectType location, of the form: ``projects/{project_number}/locations/{location_id}`` where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of AspectTypes to return. The service may return fewer than this - value. If unspecified, at most 10 AspectTypes - will be returned. The maximum value is 1000; + value. If unspecified, the service returns at + most 10 AspectTypes. The maximum value is 1000; values above 1000 will be coerced to 1000. page_token (str): Optional. Page token received from a previous ``ListAspectTypes`` call. Provide this to retrieve the - subsequent page. When paginating, all other parameters - provided to ``ListAspectTypes`` must match the call that + subsequent page. When paginating, all other parameters you + provide to ``ListAspectTypes`` must match the call that provided the page token. filter (str): - Optional. Filter request. Filters are - case-sensitive. The following formats are - supported: - - labels.key1 = "value1" - labels:key1 - name = "value" - These restrictions can be coinjoined with AND, - OR and NOT conjunctions. + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - labels.key1 = "value1" + - labels:key1 + - name = "value" + + These restrictions can be conjoined with AND, OR, and NOT + conjunctions. order_by (str): - Optional. Order by fields (``name`` or ``create_time``) for - the result. If not specified, the ordering is undefined. + Optional. Orders the result by ``name`` or ``create_time`` + fields. If not specified, the ordering is undefined. """ parent: str = proto.Field( @@ -1468,18 +1500,17 @@ class ListAspectTypesRequest(proto.Message): class ListAspectTypesResponse(proto.Message): - r"""List AspectTypes response + r"""List AspectTypes response. Attributes: aspect_types (MutableSequence[google.cloud.dataplex_v1.types.AspectType]): - ListAspectTypes under the given parent - location. + AspectTypes under the given parent location. next_page_token (str): Token to retrieve the next page of results, or empty if there are no more results in the list. unreachable_locations (MutableSequence[str]): - Locations that could not be reached. + Locations that the service couldn't reach.
""" @property @@ -1502,7 +1533,7 @@ def raw_page(self): class GetAspectTypeRequest(proto.Message): - r"""Get AspectType request + r"""Get AspectType request. Attributes: name (str): @@ -1517,7 +1548,7 @@ class GetAspectTypeRequest(proto.Message): class CreateEntryRequest(proto.Message): - r""" + r"""Create Entry request. Attributes: parent (str): @@ -1527,22 +1558,22 @@ class CreateEntryRequest(proto.Message): Required. Entry identifier. It has to be unique within an Entry Group. - Entries corresponding to Google Cloud resources use Entry ID - format based on Full Resource Names - (https://cloud.google.com/apis/design/resource_names#full_resource_name). - The format is a Full Resource Name of the resource without - the prefix double slashes in the API Service Name part of - Full Resource Name. This allows retrieval of entries using - their associated resource name. + Entries corresponding to Google Cloud resources use an Entry + ID format based on `full resource + names `__. + The format is a full resource name of the resource without + the prefix double slashes in the API service name part of + the full resource name. This allows retrieval of entries + using their associated resource name. - For example if the Full Resource Name of a resource is + For example, if the full resource name of a resource is ``//library.googleapis.com/shelves/shelf1/books/book2``, then the suggested entry_id is ``library.googleapis.com/shelves/shelf1/books/book2``. It is also suggested to follow the same convention for - entries corresponding to resources from other providers or - systems than Google Cloud. + entries corresponding to resources from providers or systems + other than Google Cloud. The maximum size of the field is 4000 characters. entry (google.cloud.dataplex_v1.types.Entry): @@ -1565,7 +1596,7 @@ class CreateEntryRequest(proto.Message): class UpdateEntryRequest(proto.Message): - r""" + r"""Update Entry request. Attributes: entry (google.cloud.dataplex_v1.types.Entry): @@ -1574,31 +1605,35 @@ class UpdateEntryRequest(proto.Message): Optional. Mask of fields to update. To update Aspects, the update_mask must contain the value "aspects". - If the update_mask is empty, all modifiable fields present - in the request will be updated. + If the update_mask is empty, the service will update all + modifiable fields present in the request. allow_missing (bool): - Optional. If set to true and the entry does - not exist, it will be created. + Optional. If set to true and the entry + doesn't exist, the service will create it. delete_missing_aspects (bool): Optional. If set to true and the aspect_keys specify aspect - ranges, any existing aspects from that range not provided in - the request will be deleted. + ranges, the service deletes any existing aspects from that + range that weren't provided in the request. aspect_keys (MutableSequence[str]): - Optional. The map keys of the Aspects which should be - modified. Supports the following syntaxes: - - - - matches aspect on given type - and empty path - - @path - matches aspect on given - type and specified path - - \* - matches aspects on given type - for all paths - - \*@path - matches aspects of all types on the given path - - Existing aspects matching the syntax will not be removed - unless ``delete_missing_aspects`` is set to true. - - If this field is left empty, it will be treated as + Optional. The map keys of the Aspects which the service + should modify. 
It supports the following syntaxes: + + - ```` - matches an aspect of the + given type and empty path. + - ``@path`` - matches an aspect of + the given type and specified path. For example, to attach + an aspect to a field that is specified by the ``schema`` + aspect, the path should have the format + ``Schema.``. + - ``*`` - matches aspects of the + given type for all paths. + - ``*@path`` - matches aspects of all types on the given + path. + + The service will not remove existing aspects matching the + syntax unless ``delete_missing_aspects`` is set to true. + + If this field is left empty, the service treats it as specifying exactly those Aspects present in the request. """ @@ -1627,7 +1662,7 @@ class UpdateEntryRequest(proto.Message): class DeleteEntryRequest(proto.Message): - r""" + r"""Delete Entry request. Attributes: name (str): @@ -1642,31 +1677,46 @@ class DeleteEntryRequest(proto.Message): class ListEntriesRequest(proto.Message): - r""" + r"""List Entries request. Attributes: parent (str): Required. The resource name of the parent Entry Group: ``projects/{project}/locations/{location}/entryGroups/{entry_group}``. page_size (int): - + Optional. Number of items to return per page. If there are + remaining results, the service returns a next_page_token. If + unspecified, the service returns at most 10 Entries. The + maximum value is 100; values above 100 will be coerced to + 100. page_token (str): - Optional. The pagination token returned by a - previous request. + Optional. Page token received from a previous + ``ListEntries`` call. Provide this to retrieve the + subsequent page. filter (str): Optional. A filter on the entries to return. Filters are - case-sensitive. The request can be filtered by the following - fields: entry_type, entry_source.display_name. The - comparison operators are =, !=, <, >, <=, >= (strings are - compared according to lexical order) The logical operators - AND, OR, NOT can be used in the filter. Wildcard "*" can be - used, but for entry_type the full project id or number needs - to be provided. Example filter expressions: - `entry_source.display_name=AnExampleDisplayName` - `entry_type=projects/example-project/locations/global/entryTypes/example-entry_type` - `entry_type=projects/example-project/locations/us/entryTypes/a* - OR entry_type=projects/another-project/locations/*` `NOT - entry_source.display_name=AnotherExampleDisplayName`. + case-sensitive. You can filter the request by the following + fields: + + - entry_type + - entry_source.display_name + + The comparison operators are =, !=, <, >, <=, >=. The + service compares strings according to lexical order. + + You can use the logical operators AND, OR, NOT in the + filter. + + You can use Wildcard "*", but for entry_type you need to + provide the full project id or number. + + Example filter expressions: + + - "entry_source.display_name=AnExampleDisplayName" + - "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" + - "entry_type=projects/example-project/locations/us/entryTypes/a\* + OR entry_type=projects/another-project/locations/\*" + - "NOT entry_source.display_name=AnotherExampleDisplayName". """ parent: str = proto.Field( @@ -1688,13 +1738,16 @@ class ListEntriesRequest(proto.Message): class ListEntriesResponse(proto.Message): - r""" + r"""List Entries response. Attributes: entries (MutableSequence[google.cloud.dataplex_v1.types.Entry]): - The list of entries. + The list of entries under the given parent + location. next_page_token (str): - Pagination token. 
+ Token to retrieve the next page of results, + or empty if there are no more results in the + list. """ @property @@ -1713,24 +1766,23 @@ def raw_page(self): class GetEntryRequest(proto.Message): - r""" + r"""Get Entry request. Attributes: name (str): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. view (google.cloud.dataplex_v1.types.EntryView): - Optional. View for controlling which parts of - an entry are to be returned. + Optional. View to control which parts of an + entry the service should return. aspect_types (MutableSequence[str]): Optional. Limits the aspects returned to the - provided aspect types. Only works if the CUSTOM - view is selected. + provided aspect types. It only works for CUSTOM + view. paths (MutableSequence[str]): Optional. Limits the aspects returned to those associated with the provided paths within - the Entry. Only works if the CUSTOM view is - selected. + the Entry. It only works for CUSTOM view. """ name: str = proto.Field( @@ -1753,7 +1805,7 @@ class GetEntryRequest(proto.Message): class LookupEntryRequest(proto.Message): - r""" + r"""Lookup Entry request using permissions in the source system. Attributes: name (str): @@ -1761,17 +1813,16 @@ class LookupEntryRequest(proto.Message): attributed in the following form: ``projects/{project}/locations/{location}``. view (google.cloud.dataplex_v1.types.EntryView): - Optional. View for controlling which parts of - an entry are to be returned. + Optional. View to control which parts of an + entry the service should return. aspect_types (MutableSequence[str]): Optional. Limits the aspects returned to the - provided aspect types. Only works if the CUSTOM - view is selected. + provided aspect types. It only works for CUSTOM + view. paths (MutableSequence[str]): Optional. Limits the aspects returned to those associated with the provided paths within - the Entry. Only works if the CUSTOM view is - selected. + the Entry. It only works for CUSTOM view. entry (str): Required. The resource name of the Entry: ``projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}``. @@ -1812,18 +1863,21 @@ class SearchEntriesRequest(proto.Message): Required. The query against which entries in scope should be matched. page_size (int): - Optional. Pagination. + Optional. Number of results in the search page. If <=0, then + defaults to 10. Max limit for page_size is 1000. Throws an + invalid argument for page_size > 1000. page_token (str): - + Optional. Page token received from a previous + ``SearchEntries`` call. Provide this to retrieve the + subsequent page. order_by (str): - Optional. Ordering of the results. Supported - options to be added later. + Optional. Specifies the ordering of results. scope (str): Optional. The scope under which the search should be - operating. Should either be organizations/ or - projects/. If left unspecified, it will default - to the organization where the project provided in ``name`` - is located. + operating. It must either be ``organizations/`` or + ``projects/``. If it is unspecified, it + defaults to the organization where the project provided in + ``name`` is located. """ name: str = proto.Field( @@ -1903,12 +1957,16 @@ class SearchEntriesResponse(proto.Message): The results matching the search query. total_size (int): The estimated total number of matching - entries. Not guaranteed to be accurate. + entries. This number isn't guaranteed to be + accurate. next_page_token (str): - Pagination token. 
+ Token to retrieve the next page of results, + or empty if there are no more results in the + list. unreachable (MutableSequence[str]): - Unreachable locations. Search results don't - include data from those locations. + Locations that the service couldn't reach. + Search results don't include data from these + locations. """ @property @@ -1934,4 +1992,644 @@ def raw_page(self): ) +class ImportItem(proto.Message): + r"""An object that describes the values that you want to set for an + entry and its attached aspects when you import metadata. Used when + you run a metadata import job. See + [CreateMetadataJob][google.cloud.dataplex.v1.CatalogService.CreateMetadataJob]. + + You provide a collection of import items in a metadata import file. + For more information about how to create a metadata import file, see + `Metadata import + file `__. + + Attributes: + entry (google.cloud.dataplex_v1.types.Entry): + Information about an entry and its attached + aspects. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to update, in paths that are relative to the + ``Entry`` resource. Separate each field with a comma. + + In ``FULL`` entry sync mode, Dataplex includes the paths of + all of the fields for an entry that can be modified, + including aspects. This means that Dataplex replaces the + existing entry with the entry in the metadata import file. + All modifiable fields are updated, regardless of the fields + that are listed in the update mask, and regardless of + whether a field is present in the ``entry`` object. + + The ``update_mask`` field is ignored when an entry is + created or re-created. + + Dataplex also determines which entries and aspects to modify + by comparing the values and timestamps that you provide in + the metadata import file with the values and timestamps that + exist in your project. For more information, see `Comparison + logic `__. + aspect_keys (MutableSequence[str]): + The aspects to modify. Supports the following syntaxes: + + - ``{aspect_type_reference}``: matches aspects that belong + to the specified aspect type and are attached directly to + the entry. + - ``{aspect_type_reference}@{path}``: matches aspects that + belong to the specified aspect type and path. + - ``{aspect_type_reference}@*``: matches aspects that + belong to the specified aspect type for all paths. + + Replace ``{aspect_type_reference}`` with a reference to the + aspect type, in the format + ``{project_id_or_number}.{location_id}.{aspect_type_id}``. + + If you leave this field empty, it is treated as specifying + exactly those aspects that are present within the specified + entry. + + In ``FULL`` entry sync mode, Dataplex implicitly adds the + keys for all of the required aspects of an entry. + """ + + entry: "Entry" = proto.Field( + proto.MESSAGE, + number=1, + message="Entry", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + aspect_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateMetadataJobRequest(proto.Message): + r"""Create metadata job request. + + Attributes: + parent (str): + Required. The resource name of the parent location, in the + format + ``projects/{project_id_or_number}/locations/{location_id}`` + metadata_job (google.cloud.dataplex_v1.types.MetadataJob): + Required. The metadata job resource. + metadata_job_id (str): + Optional. The metadata job ID. If not provided, a unique ID + is generated with the prefix ``metadata-job-``. 
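# A minimal sketch of an ImportItem as defined above, shown only to illustrate
# the field shapes; in practice import items are written as JSON lines in the
# Cloud Storage metadata import file described above. All resource names are
# hypothetical.
from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2

item = dataplex_v1.ImportItem(
    entry=dataplex_v1.Entry(
        name=(
            "projects/example-project/locations/us-central1/"
            "entryGroups/example-group/entries/example-entry"
        ),
        entry_type="projects/example-project/locations/us-central1/entryTypes/example-type",
    ),
    # Paths are relative to the Entry resource; "aspects" marks aspects as modifiable.
    update_mask=field_mask_pb2.FieldMask(paths=["aspects"]),
    # Aspect key syntax: {project_id_or_number}.{location_id}.{aspect_type_id}.
    aspect_keys=["example-project.us-central1.example-aspect-type"],
)

# One way to serialize an item to JSON; the exact import-file layout is defined
# in the metadata import file documentation referenced above.
print(dataplex_v1.ImportItem.to_json(item))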
+ validate_only (bool): + Optional. The service validates the request + without performing any mutations. The default is + false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + metadata_job: "MetadataJob" = proto.Field( + proto.MESSAGE, + number=2, + message="MetadataJob", + ) + metadata_job_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetMetadataJobRequest(proto.Message): + r"""Get metadata job request. + + Attributes: + name (str): + Required. The resource name of the metadata job, in the + format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListMetadataJobsRequest(proto.Message): + r"""List metadata jobs request. + + Attributes: + parent (str): + Required. The resource name of the parent location, in the + format + ``projects/{project_id_or_number}/locations/{location_id}`` + page_size (int): + Optional. The maximum number of metadata jobs + to return. The service might return fewer jobs + than this value. If unspecified, at most 10 jobs + are returned. The maximum value is 1,000. + page_token (str): + Optional. The page token received from a previous + ``ListMetadataJobs`` call. Provide this token to retrieve + the subsequent page of results. When paginating, all other + parameters that are provided to the ``ListMetadataJobs`` + request must match the call that provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + service supports the following formats: + + - ``labels.key1 = "value1"`` + - ``labels:key1`` + - ``name = "value"`` + + You can combine filters with ``AND``, ``OR``, and ``NOT`` + operators. + order_by (str): + Optional. The field to sort the results by, either ``name`` + or ``create_time``. If not specified, the ordering is + undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListMetadataJobsResponse(proto.Message): + r"""List metadata jobs response. + + Attributes: + metadata_jobs (MutableSequence[google.cloud.dataplex_v1.types.MetadataJob]): + Metadata jobs under the specified parent + location. + next_page_token (str): + A token to retrieve the next page of results. + If there are no more results in the list, the + value is empty. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + metadata_jobs: MutableSequence["MetadataJob"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MetadataJob", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CancelMetadataJobRequest(proto.Message): + r"""Cancel metadata job request. + + Attributes: + name (str): + Required. The resource name of the job, in the format + ``projects/{project_id_or_number}/locations/{location_id}/metadataJobs/{metadata_job_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetadataJob(proto.Message): + r"""A metadata job resource. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The name of the resource that the + configuration is applied to, in the format + ``projects/{project_number}/locations/{location_id}/metadataJobs/{metadata_job_id}``. + uid (str): + Output only. A system-generated, globally + unique ID for the metadata job. If the metadata + job is deleted and then re-created with the same + name, this ID is different. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata job + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the metadata job + was updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels. + type_ (google.cloud.dataplex_v1.types.MetadataJob.Type): + Required. Metadata job type. + import_spec (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec): + Import job specification. + + This field is a member of `oneof`_ ``spec``. + import_result (google.cloud.dataplex_v1.types.MetadataJob.ImportJobResult): + Output only. Import job result. + + This field is a member of `oneof`_ ``result``. + status (google.cloud.dataplex_v1.types.MetadataJob.Status): + Output only. Metadata job status. + """ + + class Type(proto.Enum): + r"""Metadata job type. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified. + IMPORT (1): + Import job. + """ + TYPE_UNSPECIFIED = 0 + IMPORT = 1 + + class ImportJobResult(proto.Message): + r"""Results from a metadata import job. + + Attributes: + deleted_entries (int): + Output only. The total number of entries that + were deleted. + updated_entries (int): + Output only. The total number of entries that + were updated. + created_entries (int): + Output only. The total number of entries that + were created. + unchanged_entries (int): + Output only. The total number of entries that + were unchanged. + recreated_entries (int): + Output only. The total number of entries that + were recreated. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the status was + updated. + """ + + deleted_entries: int = proto.Field( + proto.INT64, + number=1, + ) + updated_entries: int = proto.Field( + proto.INT64, + number=2, + ) + created_entries: int = proto.Field( + proto.INT64, + number=3, + ) + unchanged_entries: int = proto.Field( + proto.INT64, + number=4, + ) + recreated_entries: int = proto.Field( + proto.INT64, + number=6, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + class ImportJobSpec(proto.Message): + r"""Job specification for a metadata import job + + Attributes: + source_storage_uri (str): + Optional. The URI of a Cloud Storage bucket or folder + (beginning with ``gs://`` and ending with ``/``) that + contains the metadata import files for this job. + + A metadata import file defines the values to set for each of + the entries and aspects in a metadata job. For more + information about how to create a metadata import file and + the file requirements, see `Metadata import + file `__. + + You can provide multiple metadata import files in the same + metadata job. The bucket or folder must contain at least one + metadata import file, in JSON Lines format (either ``.json`` + or ``.jsonl`` file extension). + + In ``FULL`` entry sync mode, don't save the metadata import + file in a folder named ``SOURCE_STORAGE_URI/deletions/``. 
+ + **Caution**: If the metadata import file contains no data, + all entries and aspects that belong to the job's scope are + deleted. + source_create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when the process that + created the metadata import files began. + scope (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.ImportJobScope): + Required. A boundary on the scope of impact + that the metadata import job can have. + entry_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): + Required. The sync mode for entries. Only ``FULL`` mode is + supported for entries. All entries in the job's scope are + modified. If an entry exists in Dataplex but isn't included + in the metadata import file, the entry is deleted when you + run the metadata job. + aspect_sync_mode (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.SyncMode): + Required. The sync mode for aspects. Only ``INCREMENTAL`` + mode is supported for aspects. An aspect is modified only if + the metadata import file includes a reference to the aspect + in the ``update_mask`` field and the ``aspect_keys`` field. + log_level (google.cloud.dataplex_v1.types.MetadataJob.ImportJobSpec.LogLevel): + Optional. The level of logs to write to Cloud Logging for + this job. + + Debug-level logs provide highly-detailed information for + troubleshooting, but their increased verbosity could incur + `additional + costs `__ that + might not be merited for all jobs. + + If unspecified, defaults to ``INFO``. + """ + + class SyncMode(proto.Enum): + r"""Specifies how the entries and aspects in a metadata job are + updated. + + Values: + SYNC_MODE_UNSPECIFIED (0): + Sync mode unspecified. + FULL (1): + All resources in the job's scope are + modified. If a resource exists in Dataplex but + isn't included in the metadata import file, the + resource is deleted when you run the metadata + job. Use this mode to perform a full sync of the + set of entries in the job scope. + INCREMENTAL (2): + Only the entries and aspects that are + explicitly included in the metadata import file + are modified. Use this mode to modify a subset + of resources while leaving unreferenced + resources unchanged. + """ + SYNC_MODE_UNSPECIFIED = 0 + FULL = 1 + INCREMENTAL = 2 + + class LogLevel(proto.Enum): + r"""The level of logs to write to Cloud Logging for this job. + + Values: + LOG_LEVEL_UNSPECIFIED (0): + Log level unspecified. + DEBUG (1): + Debug-level logging. Captures detailed logs for each import + item. Use debug-level logging to troubleshoot issues with + specific import items. For example, use debug-level logging + to identify resources that are missing from the job scope, + entries or aspects that don't conform to the associated + entry type or aspect type, or other misconfigurations with + the metadata import file. + + Depending on the size of your metadata job and the number of + logs that are generated, debug-level logging might incur + `additional + costs `__. + INFO (2): + Info-level logging. Captures logs at the + overall job level. Includes aggregate logs about + import items, but doesn't specify which import + item has an error. + """ + LOG_LEVEL_UNSPECIFIED = 0 + DEBUG = 1 + INFO = 2 + + class ImportJobScope(proto.Message): + r"""A boundary on the scope of impact that the metadata import + job can have. + + Attributes: + entry_groups (MutableSequence[str]): + Required. 
The entry group that is in scope for the import + job, specified as a relative resource name in the format + ``projects/{project_number_or_id}/locations/{location_id}/entryGroups/{entry_group_id}``. + Only entries that belong to the specified entry group are + affected by the job. + + Must contain exactly one element. The entry group and the + job must be in the same location. + entry_types (MutableSequence[str]): + Required. The entry types that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/entryTypes/{entry_type_id}``. + The job modifies only the entries that belong to these entry + types. + + If the metadata import file attempts to modify an entry + whose type isn't included in this list, the import job is + halted before modifying any entries or aspects. + + The location of an entry type must either match the location + of the job, or the entry type must be global. + aspect_types (MutableSequence[str]): + Optional. The aspect types that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/aspectTypes/{aspect_type_id}``. + The job modifies only the aspects that belong to these + aspect types. + + If the metadata import file attempts to modify an aspect + whose type isn't included in this list, the import job is + halted before modifying any entries or aspects. + + The location of an aspect type must either match the + location of the job, or the aspect type must be global. + """ + + entry_groups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + entry_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + aspect_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + source_storage_uri: str = proto.Field( + proto.STRING, + number=1, + ) + source_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + scope: "MetadataJob.ImportJobSpec.ImportJobScope" = proto.Field( + proto.MESSAGE, + number=2, + message="MetadataJob.ImportJobSpec.ImportJobScope", + ) + entry_sync_mode: "MetadataJob.ImportJobSpec.SyncMode" = proto.Field( + proto.ENUM, + number=3, + enum="MetadataJob.ImportJobSpec.SyncMode", + ) + aspect_sync_mode: "MetadataJob.ImportJobSpec.SyncMode" = proto.Field( + proto.ENUM, + number=4, + enum="MetadataJob.ImportJobSpec.SyncMode", + ) + log_level: "MetadataJob.ImportJobSpec.LogLevel" = proto.Field( + proto.ENUM, + number=6, + enum="MetadataJob.ImportJobSpec.LogLevel", + ) + + class Status(proto.Message): + r"""Metadata job status. + + Attributes: + state (google.cloud.dataplex_v1.types.MetadataJob.Status.State): + Output only. State of the metadata job. + message (str): + Output only. Message relating to the + progression of a metadata job. + completion_percent (int): + Output only. Progress tracking. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the status was + updated. + """ + + class State(proto.Enum): + r"""State of a metadata job. + + Values: + STATE_UNSPECIFIED (0): + State unspecified. + QUEUED (1): + The job is queued. + RUNNING (2): + The job is running. + CANCELING (3): + The job is being canceled. + CANCELED (4): + The job is canceled. + SUCCEEDED (5): + The job succeeded. + FAILED (6): + The job failed. + SUCCEEDED_WITH_ERRORS (7): + The job completed with some errors. 
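# A minimal end-to-end sketch that ties the metadata job types above together.
# It assumes CatalogServiceClient.create_metadata_job follows the usual GAPIC
# long-running-operation pattern and that application default credentials are
# available; the bucket, project, and resource IDs are hypothetical.
from google.cloud import dataplex_v1

job = dataplex_v1.MetadataJob(
    type_=dataplex_v1.MetadataJob.Type.IMPORT,
    import_spec=dataplex_v1.MetadataJob.ImportJobSpec(
        # Folder that holds the JSON Lines metadata import file(s).
        source_storage_uri="gs://example-bucket/metadata-import/",
        scope=dataplex_v1.MetadataJob.ImportJobSpec.ImportJobScope(
            entry_groups=[
                "projects/example-project/locations/us-central1/entryGroups/example-group"
            ],
            entry_types=[
                "projects/example-project/locations/us-central1/entryTypes/example-type"
            ],
        ),
        # Per the docstrings above: FULL for entries, INCREMENTAL for aspects.
        entry_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.FULL,
        aspect_sync_mode=dataplex_v1.MetadataJob.ImportJobSpec.SyncMode.INCREMENTAL,
    ),
)

client = dataplex_v1.CatalogServiceClient()
operation = client.create_metadata_job(
    request=dataplex_v1.CreateMetadataJobRequest(
        parent="projects/example-project/locations/us-central1",
        metadata_job=job,
        metadata_job_id="example-import-job",
    )
)
created = operation.result()  # Waits on the assumed long-running operation.
print(created.name, created.status.state)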
+ """ + STATE_UNSPECIFIED = 0 + QUEUED = 1 + RUNNING = 2 + CANCELING = 3 + CANCELED = 4 + SUCCEEDED = 5 + FAILED = 6 + SUCCEEDED_WITH_ERRORS = 7 + + state: "MetadataJob.Status.State" = proto.Field( + proto.ENUM, + number=1, + enum="MetadataJob.Status.State", + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + completion_percent: int = proto.Field( + proto.INT32, + number=3, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + import_spec: ImportJobSpec = proto.Field( + proto.MESSAGE, + number=100, + oneof="spec", + message=ImportJobSpec, + ) + import_result: ImportJobResult = proto.Field( + proto.MESSAGE, + number=200, + oneof="result", + message=ImportJobResult, + ) + status: Status = proto.Field( + proto.MESSAGE, + number=7, + message=Status, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py new file mode 100644 index 000000000000..8aa7e98b9e83 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py @@ -0,0 +1,252 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dataplex.v1", + manifest={ + "DataDiscoverySpec", + "DataDiscoveryResult", + }, +) + + +class DataDiscoverySpec(proto.Message): + r"""Spec for a data discovery scan. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bigquery_publishing_config (google.cloud.dataplex_v1.types.DataDiscoverySpec.BigQueryPublishingConfig): + Optional. Configuration for metadata + publishing. + storage_config (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig): + Cloud Storage related configurations. + + This field is a member of `oneof`_ ``resource_config``. + """ + + class BigQueryPublishingConfig(proto.Message): + r"""Describes BigQuery publishing configurations. + + Attributes: + table_type (google.cloud.dataplex_v1.types.DataDiscoverySpec.BigQueryPublishingConfig.TableType): + Optional. Determines whether to publish + discovered tables as BigLake external tables or + non-BigLake external tables. 
+ connection (str): + Optional. The BigQuery connection used to create BigLake + tables. Must be in the form + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + """ + + class TableType(proto.Enum): + r"""Determines how discovered tables are published. + + Values: + TABLE_TYPE_UNSPECIFIED (0): + Table type unspecified. + EXTERNAL (1): + Default. Discovered tables are published as + BigQuery external tables whose data is accessed + using the credentials of the user querying the + table. + BIGLAKE (2): + Discovered tables are published as BigLake + external tables whose data is accessed using the + credentials of the associated BigQuery + connection. + """ + TABLE_TYPE_UNSPECIFIED = 0 + EXTERNAL = 1 + BIGLAKE = 2 + + table_type: "DataDiscoverySpec.BigQueryPublishingConfig.TableType" = ( + proto.Field( + proto.ENUM, + number=2, + enum="DataDiscoverySpec.BigQueryPublishingConfig.TableType", + ) + ) + connection: str = proto.Field( + proto.STRING, + number=3, + ) + + class StorageConfig(proto.Message): + r"""Configurations related to Cloud Storage as the data source. + + Attributes: + include_patterns (MutableSequence[str]): + Optional. Defines the data to include during + discovery when only a subset of the data should + be considered. Provide a list of patterns that + identify the data to include. For Cloud Storage + bucket assets, these patterns are interpreted as + glob patterns used to match object names. For + BigQuery dataset assets, these patterns are + interpreted as patterns to match table names. + exclude_patterns (MutableSequence[str]): + Optional. Defines the data to exclude during + discovery. Provide a list of patterns that + identify the data to exclude. For Cloud Storage + bucket assets, these patterns are interpreted as + glob patterns used to match object names. For + BigQuery dataset assets, these patterns are + interpreted as patterns to match table names. + csv_options (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig.CsvOptions): + Optional. Configuration for CSV data. + json_options (google.cloud.dataplex_v1.types.DataDiscoverySpec.StorageConfig.JsonOptions): + Optional. Configuration for JSON data. + """ + + class CsvOptions(proto.Message): + r"""Describes CSV and similar semi-structured data formats. + + Attributes: + header_rows (int): + Optional. The number of rows to interpret as + header rows that should be skipped when reading + data rows. + delimiter (str): + Optional. The delimiter that is used to separate values. The + default is ``,`` (comma). + encoding (str): + Optional. The character encoding of the data. + The default is UTF-8. + type_inference_disabled (bool): + Optional. Whether to disable the inference of + data types for CSV data. If true, all columns + are registered as strings. + quote (str): + Optional. The character used to quote column values. Accepts + ``"`` (double quotation mark) or ``'`` (single quotation + mark). If unspecified, defaults to ``"`` (double quotation + mark). + """ + + header_rows: int = proto.Field( + proto.INT32, + number=1, + ) + delimiter: str = proto.Field( + proto.STRING, + number=2, + ) + encoding: str = proto.Field( + proto.STRING, + number=3, + ) + type_inference_disabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + quote: str = proto.Field( + proto.STRING, + number=5, + ) + + class JsonOptions(proto.Message): + r"""Describes JSON data format. + + Attributes: + encoding (str): + Optional. The character encoding of the data. + The default is UTF-8. 
+ type_inference_disabled (bool): + Optional. Whether to disable the inference of + data types for JSON data. If true, all columns + are registered as their primitive types + (strings, number, or boolean). + """ + + encoding: str = proto.Field( + proto.STRING, + number=1, + ) + type_inference_disabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + + include_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + exclude_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + csv_options: "DataDiscoverySpec.StorageConfig.CsvOptions" = proto.Field( + proto.MESSAGE, + number=3, + message="DataDiscoverySpec.StorageConfig.CsvOptions", + ) + json_options: "DataDiscoverySpec.StorageConfig.JsonOptions" = proto.Field( + proto.MESSAGE, + number=4, + message="DataDiscoverySpec.StorageConfig.JsonOptions", + ) + + bigquery_publishing_config: BigQueryPublishingConfig = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryPublishingConfig, + ) + storage_config: StorageConfig = proto.Field( + proto.MESSAGE, + number=100, + oneof="resource_config", + message=StorageConfig, + ) + + +class DataDiscoveryResult(proto.Message): + r"""The output of a data discovery scan. + + Attributes: + bigquery_publishing (google.cloud.dataplex_v1.types.DataDiscoveryResult.BigQueryPublishing): + Output only. Configuration for metadata + publishing. + """ + + class BigQueryPublishing(proto.Message): + r"""Describes BigQuery publishing configurations. + + Attributes: + dataset (str): + Output only. The BigQuery dataset to publish to. It takes + the form ``projects/{project_id}/datasets/{dataset_id}``. If + not set, the service creates a default publishing dataset. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + + bigquery_publishing: BigQueryPublishing = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryPublishing, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py index c52550c7eea7..1b0558266596 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py @@ -209,15 +209,17 @@ class ProfileInfo(proto.Message): distinct_ratio (float): Ratio of rows with distinct values against total scanned rows. Not available for complex - non-groupable field type RECORD and fields with - REPEATABLE mode. + non-groupable field type, including RECORD, + ARRAY, GEOGRAPHY, and JSON, as well as fields + with REPEATABLE mode. top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): The list of top N non-null values, frequency and ratio with which they occur in the scanned data. N is 10 or equal to the number of distinct values in the field, whichever is smaller. Not - available for complex non-groupable field type - RECORD and fields with REPEATABLE mode. + available for complex non-groupable field type, + including RECORD, ARRAY, GEOGRAPHY, and JSON, as + well as fields with REPEATABLE mode. string_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo): String type field information. 
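The DataDiscoverySpec message introduced in the new data_discovery.py module above is built like any other proto-plus type. A minimal sketch of constructing one, assuming a hypothetical project, BigQuery connection, and bucket layout (illustrative only, not part of the generated sources):

from google.cloud import dataplex_v1

# Discovery spec that publishes discovered tables as BigLake external tables
# through a (hypothetical) BigQuery connection and customizes CSV parsing.
discovery_spec = dataplex_v1.DataDiscoverySpec(
    bigquery_publishing_config=dataplex_v1.DataDiscoverySpec.BigQueryPublishingConfig(
        table_type=dataplex_v1.DataDiscoverySpec.BigQueryPublishingConfig.TableType.BIGLAKE,
        # Hypothetical connection resource name.
        connection="projects/my-project/locations/us-central1/connections/my-connection",
    ),
    storage_config=dataplex_v1.DataDiscoverySpec.StorageConfig(
        # Glob patterns matched against Cloud Storage object names.
        include_patterns=["sales/**"],
        exclude_patterns=["sales/tmp/**"],
        csv_options=dataplex_v1.DataDiscoverySpec.StorageConfig.CsvOptions(
            header_rows=1,
            delimiter=",",
        ),
    ),
)

Because storage_config is a member of the resource_config oneof, setting it selects Cloud Storage as the data source for the discovery scan.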
diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index b071fe7e6e2d..e46f60dcc3c8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -472,7 +472,7 @@ class DataQualityDimension(proto.Message): name (str): The dimension name a rule belongs to. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", - "UNIQUENESS", "INTEGRITY"] + "UNIQUENESS", "FRESHNESS", "VOLUME"] """ name: str = proto.Field( @@ -557,7 +557,7 @@ class DataQualityRule(proto.Message): Required. The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are **["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", - "UNIQUENESS", "INTEGRITY"]** + "UNIQUENESS", "FRESHNESS", "VOLUME"]** threshold (float): Optional. The minimum ratio of **passing_rows / total_rows** required to pass this rule, with a range of [0.0, 1.0]. @@ -577,6 +577,9 @@ class DataQualityRule(proto.Message): Optional. Description of the rule. - The maximum length is 1,024 characters. + suspended (bool): + Optional. Whether the Rule is active or + suspended. Default is false. """ class RangeExpectation(proto.Message): @@ -875,6 +878,10 @@ class SqlAssertion(proto.Message): proto.STRING, number=505, ) + suspended: bool = proto.Field( + proto.BOOL, + number=506, + ) class DataQualityColumnResult(proto.Message): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py index e64a238b7129..eb0eea7e0688 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py @@ -22,6 +22,7 @@ import proto # type: ignore from google.cloud.dataplex_v1.types import ( + data_discovery, data_profile, data_quality, processing, @@ -52,19 +53,22 @@ class DataScanType(proto.Enum): - r"""The type of DataScan. + r"""The type of data scan. Values: DATA_SCAN_TYPE_UNSPECIFIED (0): - The DataScan type is unspecified. + The data scan type is unspecified. DATA_QUALITY (1): - Data Quality scan. + Data quality scan. DATA_PROFILE (2): - Data Profile scan. + Data profile scan. + DATA_DISCOVERY (3): + Data discovery scan. """ DATA_SCAN_TYPE_UNSPECIFIED = 0 DATA_QUALITY = 1 DATA_PROFILE = 2 + DATA_DISCOVERY = 3 class CreateDataScanRequest(proto.Message): @@ -543,20 +547,29 @@ class DataScan(proto.Message): type_ (google.cloud.dataplex_v1.types.DataScanType): Output only. The type of DataScan. data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): - DataQualityScan related setting. + Settings for a data quality scan. This field is a member of `oneof`_ ``spec``. data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): - DataProfileScan related setting. + Settings for a data profile scan. + + This field is a member of `oneof`_ ``spec``. + data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): + Settings for a data discovery scan. This field is a member of `oneof`_ ``spec``. data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): - Output only. The result of the data quality + Output only. The result of a data quality scan. This field is a member of `oneof`_ ``result``. 
data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): - Output only. The result of the data profile + Output only. The result of a data profile + scan. + + This field is a member of `oneof`_ ``result``. + data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): + Output only. The result of a data discovery scan. This field is a member of `oneof`_ ``result``. @@ -694,6 +707,12 @@ class ExecutionStatus(proto.Message): oneof="spec", message=data_profile.DataProfileSpec, ) + data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( + proto.MESSAGE, + number=102, + oneof="spec", + message=data_discovery.DataDiscoverySpec, + ) data_quality_result: data_quality.DataQualityResult = proto.Field( proto.MESSAGE, number=200, @@ -706,6 +725,12 @@ class ExecutionStatus(proto.Message): oneof="result", message=data_profile.DataProfileResult, ) + data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( + proto.MESSAGE, + number=202, + oneof="result", + message=data_discovery.DataDiscoveryResult, + ) class DataScanJob(proto.Message): @@ -728,6 +753,9 @@ class DataScanJob(proto.Message): uid (str): Output only. System generated globally unique ID for the DataScanJob. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the DataScanJob + was created. start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the DataScanJob was started. @@ -743,20 +771,32 @@ class DataScanJob(proto.Message): type_ (google.cloud.dataplex_v1.types.DataScanType): Output only. The type of the parent DataScan. data_quality_spec (google.cloud.dataplex_v1.types.DataQualitySpec): - Output only. DataQualityScan related setting. + Output only. Settings for a data quality + scan. This field is a member of `oneof`_ ``spec``. data_profile_spec (google.cloud.dataplex_v1.types.DataProfileSpec): - Output only. DataProfileScan related setting. + Output only. Settings for a data profile + scan. + + This field is a member of `oneof`_ ``spec``. + data_discovery_spec (google.cloud.dataplex_v1.types.DataDiscoverySpec): + Output only. Settings for a data discovery + scan. This field is a member of `oneof`_ ``spec``. data_quality_result (google.cloud.dataplex_v1.types.DataQualityResult): - Output only. The result of the data quality + Output only. The result of a data quality scan. This field is a member of `oneof`_ ``result``. data_profile_result (google.cloud.dataplex_v1.types.DataProfileResult): - Output only. The result of the data profile + Output only. The result of a data profile + scan. + + This field is a member of `oneof`_ ``result``. + data_discovery_result (google.cloud.dataplex_v1.types.DataDiscoveryResult): + Output only. The result of a data discovery scan. This field is a member of `oneof`_ ``result``. 
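The data_discovery_spec and data_discovery_result oneof members added to DataScan and DataScanJob above are consumed through the existing DataScanService surface rather than the new CatalogService metadata-job RPCs sampled later in this change. A minimal sketch of creating a discovery scan, assuming hypothetical project, bucket, and scan IDs and that DataScanServiceClient.create_data_scan is used the same way as for profile and quality scans:

from google.cloud import dataplex_v1


def sample_create_discovery_scan():
    # Client for the existing DataScan service.
    client = dataplex_v1.DataScanServiceClient()

    # A DataScan whose spec is the new data_discovery_spec oneof member.
    data_scan = dataplex_v1.DataScan(
        data=dataplex_v1.DataSource(
            # Hypothetical resource name for a Cloud Storage bucket; check the
            # DataSource documentation for the exact expected format.
            resource="//storage.googleapis.com/projects/my-project/buckets/my-bucket",
        ),
        data_discovery_spec=dataplex_v1.DataDiscoverySpec(),
    )

    request = dataplex_v1.CreateDataScanRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical
        data_scan=data_scan,
        data_scan_id="my-discovery-scan",  # hypothetical
    )

    # create_data_scan returns a long-running operation; result() blocks
    # until the scan resource has been created.
    operation = client.create_data_scan(request=request)
    return operation.result()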
@@ -799,6 +839,11 @@ class State(proto.Enum): proto.STRING, number=2, ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, @@ -835,6 +880,12 @@ class State(proto.Enum): oneof="spec", message=data_profile.DataProfileSpec, ) + data_discovery_spec: data_discovery.DataDiscoverySpec = proto.Field( + proto.MESSAGE, + number=102, + oneof="spec", + message=data_discovery.DataDiscoverySpec, + ) data_quality_result: data_quality.DataQualityResult = proto.Field( proto.MESSAGE, number=200, @@ -847,6 +898,12 @@ class State(proto.Enum): oneof="result", message=data_profile.DataProfileResult, ) + data_discovery_result: data_discovery.DataDiscoveryResult = proto.Field( + proto.MESSAGE, + number=202, + oneof="result", + message=data_discovery.DataDiscoveryResult, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py index dd54639a3934..463d15aeffc6 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py @@ -55,6 +55,9 @@ class DiscoveryEvent(proto.Message): The id of the associated asset. data_location (str): The data location associated with the event. + datascan_id (str): + The id of the associated datascan for + standalone discovery. type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.EventType): The type of the event being logged. config (google.cloud.dataplex_v1.types.DiscoveryEvent.ConfigDetails): @@ -76,6 +79,11 @@ class DiscoveryEvent(proto.Message): Details about the action associated with the event. + This field is a member of `oneof`_ ``details``. + table (google.cloud.dataplex_v1.types.DiscoveryEvent.TableDetails): + Details about the BigQuery table publishing + associated with the event. + This field is a member of `oneof`_ ``details``. """ @@ -106,6 +114,16 @@ class EventType(proto.Enum): PARTITION_DELETED (7): An event representing a partition being deleted. + TABLE_PUBLISHED (10): + An event representing a table being + published. + TABLE_UPDATED (11): + An event representing a table being updated. + TABLE_IGNORED (12): + An event representing a table being skipped + in publishing. + TABLE_DELETED (13): + An event representing a table being deleted. """ EVENT_TYPE_UNSPECIFIED = 0 CONFIG = 1 @@ -115,6 +133,10 @@ class EventType(proto.Enum): PARTITION_CREATED = 5 PARTITION_UPDATED = 6 PARTITION_DELETED = 7 + TABLE_PUBLISHED = 10 + TABLE_UPDATED = 11 + TABLE_IGNORED = 12 + TABLE_DELETED = 13 class EntityType(proto.Enum): r"""The type of the entity. @@ -131,6 +153,24 @@ class EntityType(proto.Enum): TABLE = 1 FILESET = 2 + class TableType(proto.Enum): + r"""The type of the published table. + + Values: + TABLE_TYPE_UNSPECIFIED (0): + An unspecified table type. + EXTERNAL_TABLE (1): + External table type. + BIGLAKE_TABLE (2): + BigLake table type. + OBJECT_TABLE (3): + Object table type for unstructured data. + """ + TABLE_TYPE_UNSPECIFIED = 0 + EXTERNAL_TABLE = 1 + BIGLAKE_TABLE = 2 + OBJECT_TABLE = 3 + class ConfigDetails(proto.Message): r"""Details about configuration events. @@ -170,6 +210,27 @@ class EntityDetails(proto.Message): enum="DiscoveryEvent.EntityType", ) + class TableDetails(proto.Message): + r"""Details about the published table. 
+ + Attributes: + table (str): + The fully-qualified resource name of the + table resource. + type_ (google.cloud.dataplex_v1.types.DiscoveryEvent.TableType): + The type of the table resource. + """ + + table: str = proto.Field( + proto.STRING, + number=1, + ) + type_: "DiscoveryEvent.TableType" = proto.Field( + proto.ENUM, + number=2, + enum="DiscoveryEvent.TableType", + ) + class PartitionDetails(proto.Message): r"""Details about the partition. @@ -213,12 +274,19 @@ class ActionDetails(proto.Message): type_ (str): The type of action. Eg. IncompatibleDataSchema, InvalidDataFormat + issue (str): + The human readable issue associated with the + action. """ type_: str = proto.Field( proto.STRING, number=1, ) + issue: str = proto.Field( + proto.STRING, + number=2, + ) message: str = proto.Field( proto.STRING, @@ -240,6 +308,10 @@ class ActionDetails(proto.Message): proto.STRING, number=5, ) + datascan_id: str = proto.Field( + proto.STRING, + number=6, + ) type_: EventType = proto.Field( proto.ENUM, number=10, @@ -269,6 +341,12 @@ class ActionDetails(proto.Message): oneof="details", message=ActionDetails, ) + table: TableDetails = proto.Field( + proto.MESSAGE, + number=24, + oneof="details", + message=TableDetails, + ) class JobEvent(proto.Message): @@ -699,7 +777,6 @@ class EntityType(proto.Enum): class DataScanEvent(proto.Message): r"""These messages contain information about the execution of a datascan. The monitored resource is 'DataScan' - Next ID: 13 This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -770,10 +847,13 @@ class ScanType(proto.Enum): Data scan for data profile. DATA_QUALITY (2): Data scan for data quality. + DATA_DISCOVERY (4): + Data scan for data discovery. """ SCAN_TYPE_UNSPECIFIED = 0 DATA_PROFILE = 1 DATA_QUALITY = 2 + DATA_DISCOVERY = 4 class State(proto.Enum): r"""The job state of the data scan. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py index fa68e7f2e372..3fefb6d4d8ef 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py @@ -406,10 +406,9 @@ class SparkTaskConfig(proto.Message): This field is a member of `oneof`_ ``driver``. sql_script_file (str): - A reference to a query file. This can be the Cloud Storage - URI of the query file or it can the path to a SqlScript - Content. The execution args are used to declare a set of - script variables (``set key="value";``). + A reference to a query file. This should be the Cloud + Storage URI of the query file. The execution args are used + to declare a set of script variables (``set key="value";``). This field is a member of `oneof`_ ``driver``. sql_script (str): diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py new file mode 100644 index 000000000000..7490891a58d0 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CancelMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_metadata_job(request=request) + + +# [END dataplex_v1_generated_CatalogService_CancelMetadataJob_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py new file mode 100644 index 000000000000..803f83d5f306 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CancelMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_cancel_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CancelMetadataJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_metadata_job(request=request) + + +# [END dataplex_v1_generated_CatalogService_CancelMetadataJob_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py new file mode 100644 index 000000000000..1ecd3586aee7 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateMetadataJob_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py new file mode 100644 index 000000000000..022008b13e72 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + metadata_job = dataplex_v1.MetadataJob() + metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] + metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] + metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" + metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.type_ = "IMPORT" + + request = dataplex_v1.CreateMetadataJobRequest( + parent="parent_value", + metadata_job=metadata_job, + ) + + # Make the request + operation = client.create_metadata_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateMetadataJob_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py new file mode 100644 index 000000000000..144996fdc0c2 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetMetadataJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetMetadataJob_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py new file mode 100644 index 000000000000..f2c032d11d82 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_metadata_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMetadataJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetMetadataJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_metadata_job(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetMetadataJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata_job(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetMetadataJob_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py new file mode 100644 index 000000000000..8d07e30862df --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMetadataJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_ListMetadataJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_ListMetadataJobs_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py new file mode 100644 index 000000000000..7c5043546f57 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMetadataJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_ListMetadataJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_metadata_jobs(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListMetadataJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_metadata_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_CatalogService_ListMetadataJobs_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index e466410d1430..a7eb15b2cc0d 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,9 +8,164 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "2.3.1" + "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.cancel_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CancelMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_metadata_job" + }, + "description": "Sample for CancelMetadataJob", + "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.cancel_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CancelMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CancelMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CancelMetadataJobRequest" + }, + { + "name": "name", + "type": "str" + }, 
+ { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "cancel_metadata_job" + }, + "description": "Sample for CancelMetadataJob", + "file": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CancelMetadataJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_cancel_metadata_job_sync.py" + }, { "canonical": true, "clientMethod": { @@ -719,6 +874,183 @@ ], "title": "dataplex_v1_generated_catalog_service_create_entry_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_metadata_job", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateMetadataJob", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateMetadataJob" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1463,7 +1795,168 @@ "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" + "type": "google.cloud.dataplex_v1.types.GetAspectTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.AspectType", + "shortName": "get_aspect_type" + }, + "description": "Sample for GetAspectType", + "file": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": 
"Sample for GetEntryGroup", + "file": "dataplex_v1_generated_catalog_service_get_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" }, { "name": "name", @@ -1482,14 +1975,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.AspectType", - "shortName": "get_aspect_type" + "resultType": "google.cloud.dataplex_v1.types.EntryGroup", + "shortName": "get_entry_group" }, - "description": "Sample for GetAspectType", - "file": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py", + "description": "Sample for GetEntryGroup", + "file": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetAspectType_sync", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_sync", "segments": [ { "end": 51, @@ -1522,7 +2015,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_aspect_type_sync.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" }, { "canonical": true, @@ -1532,19 +2025,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_type", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntryGroup" + "shortName": "GetEntryType" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" }, { "name": "name", @@ -1563,14 +2056,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.EntryGroup", - "shortName": "get_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryType", + "shortName": "get_entry_type" }, - "description": "Sample for GetEntryGroup", - "file": "dataplex_v1_generated_catalog_service_get_entry_group_async.py", + "description": "Sample for GetEntryType", + "file": 
"dataplex_v1_generated_catalog_service_get_entry_type_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_async", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_async", "segments": [ { "end": 51, @@ -1603,7 +2096,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_group_async.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_type_async.py" }, { "canonical": true, @@ -1612,19 +2105,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntryGroup" + "shortName": "GetEntryType" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" }, { "name": "name", @@ -1643,14 +2136,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.EntryGroup", - "shortName": "get_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryType", + "shortName": "get_entry_type" }, - "description": "Sample for GetEntryGroup", - "file": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py", + "description": "Sample for GetEntryType", + "file": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryGroup_sync", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_sync", "segments": [ { "end": 51, @@ -1683,7 +2176,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py" }, { "canonical": true, @@ -1693,19 +2186,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_type", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntryType" + "shortName": "GetEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" + "type": "google.cloud.dataplex_v1.types.GetEntryRequest" }, { "name": "name", @@ -1724,14 +2217,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.EntryType", - "shortName": "get_entry_type" + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "get_entry" }, - "description": "Sample for GetEntryType", - "file": "dataplex_v1_generated_catalog_service_get_entry_type_async.py", + "description": "Sample for GetEntry", + "file": "dataplex_v1_generated_catalog_service_get_entry_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_async", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_async", "segments": [ { "end": 51, @@ -1764,7 +2257,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_type_async.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_async.py" }, { "canonical": true, @@ -1773,19 +2266,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_type", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryType", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntryType" + "shortName": "GetEntry" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryTypeRequest" + "type": "google.cloud.dataplex_v1.types.GetEntryRequest" }, { "name": "name", @@ -1804,14 +2297,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.EntryType", - "shortName": "get_entry_type" + "resultType": "google.cloud.dataplex_v1.types.Entry", + "shortName": "get_entry" }, - "description": "Sample for GetEntryType", - "file": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py", + "description": "Sample for GetEntry", + "file": "dataplex_v1_generated_catalog_service_get_entry_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntryType_sync", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_sync", "segments": [ { "end": 51, @@ -1844,7 +2337,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_type_sync.py" + "title": "dataplex_v1_generated_catalog_service_get_entry_sync.py" }, { "canonical": true, @@ -1854,19 +2347,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_metadata_job", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntry" + "shortName": "GetMetadataJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryRequest" + "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" }, { "name": "name", @@ -1885,14 +2378,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "get_entry" + "resultType": "google.cloud.dataplex_v1.types.MetadataJob", + "shortName": "get_metadata_job" }, - "description": "Sample for GetEntry", - "file": "dataplex_v1_generated_catalog_service_get_entry_async.py", + "description": "Sample for GetMetadataJob", + "file": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_async", + "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_async", "segments": [ { "end": 
51, @@ -1925,7 +2418,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_async.py" + "title": "dataplex_v1_generated_catalog_service_get_metadata_job_async.py" }, { "canonical": true, @@ -1934,19 +2427,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_metadata_job", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntry", + "fullName": "google.cloud.dataplex.v1.CatalogService.GetMetadataJob", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "GetEntry" + "shortName": "GetMetadataJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.GetEntryRequest" + "type": "google.cloud.dataplex_v1.types.GetMetadataJobRequest" }, { "name": "name", @@ -1965,14 +2458,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataplex_v1.types.Entry", - "shortName": "get_entry" + "resultType": "google.cloud.dataplex_v1.types.MetadataJob", + "shortName": "get_metadata_job" }, - "description": "Sample for GetEntry", - "file": "dataplex_v1_generated_catalog_service_get_entry_sync.py", + "description": "Sample for GetMetadataJob", + "file": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_GetEntry_sync", + "regionTag": "dataplex_v1_generated_CatalogService_GetMetadataJob_sync", "segments": [ { "end": 51, @@ -2005,7 +2498,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_get_entry_sync.py" + "title": "dataplex_v1_generated_catalog_service_get_metadata_job_sync.py" }, { "canonical": true, @@ -2651,6 +3144,167 @@ ], "title": "dataplex_v1_generated_catalog_service_list_entry_types_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.list_metadata_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListMetadataJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsAsyncPager", + "shortName": "list_metadata_jobs" + }, + "description": "Sample for ListMetadataJobs", + "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.list_metadata_jobs", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.ListMetadataJobs", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "ListMetadataJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListMetadataJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.catalog_service.pagers.ListMetadataJobsPager", + "shortName": "list_metadata_jobs" + }, + "description": "Sample for ListMetadataJobs", + "file": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_ListMetadataJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_list_metadata_jobs_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py index f5c81e4a5f46..6681941351bf 100644 --- a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py +++ b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py @@ -40,6 +40,7 @@ class dataplexCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'cancel_job': ('name', ), + 'cancel_metadata_job': ('name', ), 'create_aspect_type': ('parent', 'aspect_type_id', 'aspect_type', 'validate_only', ), 'create_asset': ('parent', 'asset_id', 'asset', 'validate_only', ), 'create_content': ('parent', 'content', 'validate_only', ), @@ -53,6 +54,7 @@ class dataplexCallTransformer(cst.CSTTransformer): 'create_entry_type': ('parent', 'entry_type_id', 'entry_type', 'validate_only', ), 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), 'create_lake': ('parent', 'lake_id', 'lake', 'validate_only', ), + 'create_metadata_job': ('parent', 'metadata_job', 'metadata_job_id', 'validate_only', ), 'create_partition': ('parent', 'partition', 'validate_only', ), 'create_task': ('parent', 'task_id', 'task', 'validate_only', ), 'create_zone': ('parent', 'zone_id', 'zone', 'validate_only', ), @@ -89,6 +91,7 @@ class dataplexCallTransformer(cst.CSTTransformer): 'get_iam_policy': ('resource', 'options', ), 'get_job': ('name', ), 'get_lake': ('name', ), + 
'get_metadata_job': ('name', ), 'get_partition': ('name', ), 'get_task': ('name', ), 'get_zone': ('name', ), @@ -109,6 +112,7 @@ class dataplexCallTransformer(cst.CSTTransformer): 'list_jobs': ('parent', 'page_size', 'page_token', ), 'list_lake_actions': ('parent', 'page_size', 'page_token', ), 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_metadata_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_partitions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_sessions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_tasks': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index 6198538a8234..72f4328e35a7 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -9568,6 +9568,1584 @@ async def test_search_entries_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + catalog.CreateMetadataJobRequest, + dict, + ], +) +def test_create_metadata_job(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CreateMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateMetadataJobRequest( + parent="parent_value", + metadata_job_id="metadata_job_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateMetadataJobRequest( + parent="parent_value", + metadata_job_id="metadata_job_id_value", + ) + + +def test_create_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_metadata_job in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_metadata_job + ] = mock_rpc + request = {} + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_metadata_job_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_metadata_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_metadata_job + ] = mock_rpc + + request = {} + await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_metadata_job_async( + transport: str = "grpc_asyncio", request_type=catalog.CreateMetadataJobRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_metadata_job_async_from_dict(): + await test_create_metadata_job_async(request_type=dict) + + +def test_create_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateMetadataJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateMetadataJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_metadata_job_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_metadata_job( + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].metadata_job + mock_val = catalog.MetadataJob(name="name_value") + assert arg == mock_val + arg = args[0].metadata_job_id + mock_val = "metadata_job_id_value" + assert arg == mock_val + + +def test_create_metadata_job_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_metadata_job_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_metadata_job( + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].metadata_job + mock_val = catalog.MetadataJob(name="name_value") + assert arg == mock_val + arg = args[0].metadata_job_id + mock_val = "metadata_job_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_metadata_job_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.GetMetadataJobRequest, + dict, + ], +) +def test_get_metadata_job(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = catalog.MetadataJob( + name="name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, + ) + response = client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.MetadataJob) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.type_ == catalog.MetadataJob.Type.IMPORT + + +def test_get_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetMetadataJobRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetMetadataJobRequest( + name="name_value", + ) + + +def test_get_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_metadata_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_metadata_job + ] = mock_rpc + request = {} + client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_metadata_job_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_metadata_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_metadata_job + ] = mock_rpc + + request = {} + await client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_metadata_job_async( + transport: str = "grpc_asyncio", request_type=catalog.GetMetadataJobRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.MetadataJob( + name="name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, + ) + ) + response = await client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.GetMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.MetadataJob) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.type_ == catalog.MetadataJob.Type.IMPORT + + +@pytest.mark.asyncio +async def test_get_metadata_job_async_from_dict(): + await test_get_metadata_job_async(request_type=dict) + + +def test_get_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.GetMetadataJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value = catalog.MetadataJob() + client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.GetMetadataJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob()) + await client.get_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_metadata_job_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.MetadataJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_metadata_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_metadata_job_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_metadata_job( + catalog.GetMetadataJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_metadata_job_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.MetadataJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.MetadataJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_metadata_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_metadata_job_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_metadata_job( + catalog.GetMetadataJobRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.ListMetadataJobsRequest, + dict, + ], +) +def test_list_metadata_jobs(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListMetadataJobsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + response = client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.ListMetadataJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMetadataJobsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +def test_list_metadata_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.ListMetadataJobsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_metadata_jobs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.ListMetadataJobsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_metadata_jobs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_metadata_jobs in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_metadata_jobs + ] = mock_rpc + request = {} + client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_metadata_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_metadata_jobs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_metadata_jobs + ] = mock_rpc + + request = {} + await client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_metadata_jobs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async( + transport: str = "grpc_asyncio", request_type=catalog.ListMetadataJobsRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + response = await client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.ListMetadataJobsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMetadataJobsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_from_dict(): + await test_list_metadata_jobs_async(request_type=dict) + + +def test_list_metadata_jobs_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListMetadataJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value = catalog.ListMetadataJobsResponse() + client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.ListMetadataJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse() + ) + await client.list_metadata_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_metadata_jobs_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListMetadataJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_metadata_jobs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_metadata_jobs_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_metadata_jobs( + catalog.ListMetadataJobsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.ListMetadataJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_metadata_jobs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_metadata_jobs( + catalog.ListMetadataJobsRequest(), + parent="parent_value", + ) + + +def test_list_metadata_jobs_pager(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_metadata_jobs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.MetadataJob) for i in results) + + +def test_list_metadata_jobs_pages(transport_name: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_metadata_jobs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_pager(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_metadata_jobs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, catalog.MetadataJob) for i in responses) + + +@pytest.mark.asyncio +async def test_list_metadata_jobs_async_pages(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_metadata_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.CancelMetadataJobRequest, + dict, + ], +) +def test_cancel_metadata_job(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CancelMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_metadata_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CancelMetadataJobRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.cancel_metadata_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CancelMetadataJobRequest( + name="name_value", + ) + + +def test_cancel_metadata_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.cancel_metadata_job in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.cancel_metadata_job + ] = mock_rpc + request = {} + client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.cancel_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_metadata_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_metadata_job + ] = mock_rpc + + request = {} + await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.cancel_metadata_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async( + transport: str = "grpc_asyncio", request_type=catalog.CancelMetadataJobRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CancelMetadataJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_async_from_dict(): + await test_cancel_metadata_job_async(request_type=dict) + + +def test_cancel_metadata_job_field_headers(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CancelMetadataJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value = None + client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CancelMetadataJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_cancel_metadata_job_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_metadata_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_cancel_metadata_job_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_metadata_job( + catalog.CancelMetadataJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_metadata_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_metadata_job_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_metadata_job( + catalog.CancelMetadataJobRequest(), + name="name_value", + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CatalogServiceGrpcTransport( @@ -10157,6 +11735,96 @@ def test_search_entries_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value = catalog.MetadataJob() + client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_metadata_jobs_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value = catalog.ListMetadataJobsResponse() + client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value = None + client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -10806,6 +12474,117 @@ async def test_search_entries_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.MetadataJob( + name="name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, + ) + ) + await client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_metadata_jobs_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = CatalogServiceClient( @@ -10861,6 +12640,10 @@ def test_catalog_service_base_transport(): "get_entry", "lookup_entry", "search_entries", + "create_metadata_job", + "get_metadata_job", + "list_metadata_jobs", + "cancel_metadata_job", "get_location", "list_locations", "get_operation", @@ -11362,8 +13145,36 @@ def test_parse_entry_type_path(): assert expected == actual +def test_metadata_job_path(): + project = "whelk" + location = "octopus" + metadataJob = "oyster" + expected = ( + "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format( + project=project, + location=location, + metadataJob=metadataJob, + ) + ) + actual = CatalogServiceClient.metadata_job_path(project, location, metadataJob) + assert expected == actual + + +def test_parse_metadata_job_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "metadataJob": "mussel", + } + path = CatalogServiceClient.metadata_job_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_metadata_job_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -11373,7 +13184,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nautilus", } path = CatalogServiceClient.common_billing_account_path(**expected) @@ -11383,7 +13194,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -11393,7 +13204,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "abalone", } path = CatalogServiceClient.common_folder_path(**expected) @@ -11403,7 +13214,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -11413,7 +13224,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "clam", } path = CatalogServiceClient.common_organization_path(**expected) @@ -11423,7 +13234,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -11433,7 +13244,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "octopus", } path = CatalogServiceClient.common_project_path(**expected) @@ -11443,8 +13254,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -11455,8 +13266,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "cuttlefish", + "location": "mussel", } path = CatalogServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py 
b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index ada3a77c02bd..2dc3372cf740 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -71,6 +71,7 @@ transports, ) from google.cloud.dataplex_v1.types import ( + data_discovery, data_profile, data_quality, datascans, @@ -5508,10 +5509,38 @@ def test_data_scan_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_data_scan_path(): +def test_connection_path(): project = "squid" location = "clam" - dataScan = "whelk" + connection = "whelk" + expected = ( + "projects/{project}/locations/{location}/connections/{connection}".format( + project=project, + location=location, + connection=connection, + ) + ) + actual = DataScanServiceClient.connection_path(project, location, connection) + assert expected == actual + + +def test_parse_connection_path(): + expected = { + "project": "octopus", + "location": "oyster", + "connection": "nudibranch", + } + path = DataScanServiceClient.connection_path(**expected) + + # Check that the path construction is reversible. + actual = DataScanServiceClient.parse_connection_path(path) + assert expected == actual + + +def test_data_scan_path(): + project = "cuttlefish" + location = "mussel" + dataScan = "winkle" expected = "projects/{project}/locations/{location}/dataScans/{dataScan}".format( project=project, location=location, @@ -5523,9 +5552,9 @@ def test_data_scan_path(): def test_parse_data_scan_path(): expected = { - "project": "octopus", - "location": "oyster", - "dataScan": "nudibranch", + "project": "nautilus", + "location": "scallop", + "dataScan": "abalone", } path = DataScanServiceClient.data_scan_path(**expected) @@ -5535,10 +5564,10 @@ def test_parse_data_scan_path(): def test_data_scan_job_path(): - project = "cuttlefish" - location = "mussel" - dataScan = "winkle" - job = "nautilus" + project = "squid" + location = "clam" + dataScan = "whelk" + job = "octopus" expected = "projects/{project}/locations/{location}/dataScans/{dataScan}/jobs/{job}".format( project=project, location=location, @@ -5551,10 +5580,10 @@ def test_data_scan_job_path(): def test_parse_data_scan_job_path(): expected = { - "project": "scallop", - "location": "abalone", - "dataScan": "squid", - "job": "clam", + "project": "oyster", + "location": "nudibranch", + "dataScan": "cuttlefish", + "job": "mussel", } path = DataScanServiceClient.data_scan_job_path(**expected) @@ -5563,12 +5592,35 @@ def test_parse_data_scan_job_path(): assert expected == actual +def test_dataset_path(): + project = "winkle" + dataset = "nautilus" + expected = "projects/{project}/datasets/{dataset}".format( + project=project, + dataset=dataset, + ) + actual = DataScanServiceClient.dataset_path(project, dataset) + assert expected == actual + + +def test_parse_dataset_path(): + expected = { + "project": "scallop", + "dataset": "abalone", + } + path = DataScanServiceClient.dataset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataScanServiceClient.parse_dataset_path(path) + assert expected == actual + + def test_entity_path(): - project = "whelk" - location = "octopus" - lake = "oyster" - zone = "nudibranch" - entity = "cuttlefish" + project = "squid" + location = "clam" + lake = "whelk" + zone = "octopus" + entity = "oyster" expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}/entities/{entity}".format( project=project, location=location, @@ -5582,11 +5634,11 @@ def test_entity_path(): def test_parse_entity_path(): expected = { - "project": "mussel", - "location": "winkle", - "lake": "nautilus", - "zone": "scallop", - "entity": "abalone", + "project": "nudibranch", + "location": "cuttlefish", + "lake": "mussel", + "zone": "winkle", + "entity": "nautilus", } path = DataScanServiceClient.entity_path(**expected) @@ -5596,7 +5648,7 @@ def test_parse_entity_path(): def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5606,7 +5658,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "abalone", } path = DataScanServiceClient.common_billing_account_path(**expected) @@ -5616,7 +5668,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -5626,7 +5678,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "clam", } path = DataScanServiceClient.common_folder_path(**expected) @@ -5636,7 +5688,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -5646,7 +5698,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "octopus", } path = DataScanServiceClient.common_organization_path(**expected) @@ -5656,7 +5708,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -5666,7 +5718,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nudibranch", } path = DataScanServiceClient.common_project_path(**expected) @@ -5676,8 +5728,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -5688,8 +5740,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "winkle", + "location": "nautilus", } path = DataScanServiceClient.common_location_path(**expected) diff --git a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml index 71b2ebb536c2..effa34a100d7 100644 --- a/scripts/client-post-processing/doc-formatting.yaml +++ b/scripts/client-post-processing/doc-formatting.yaml @@ -223,20 +223,8 @@ replacements: - paths: [ 
packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py, ] - before: | - : - \ \"entry_source.display_name=AnExampleDisplayName\" - \ \"entry_type=projects\/example-project\/locations\/global\/entryTypes\/example-entry_type\" - \ \"entry_type=projects\/example-project\/locations\/us\/entryTypes\/a\* - \ OR entry_type=projects\/another-project\/locations\/\*\" \"NOT - \ entry_source.display_name=AnotherExampleDisplayName\". - after: | - : - `entry_source.display_name=AnExampleDisplayName` - `entry_type=projects/example-project/locations/global/entryTypes/example-entry_type` - `entry_type=projects/example-project/locations/us/entryTypes/a* - OR entry_type=projects/another-project/locations/*` `NOT - entry_source.display_name=AnotherExampleDisplayName`. + before: entry_type=projects\/another-project\/locations\/\* + after: "entry_type=projects/another-project/locations/\\*" count: 1 - paths: [ packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py,